WGAN(Abhijeet Singh)¶
Initial Model (Made Sure No Error)¶
In [1]:
import pandas as pd
import os
from torch.utils.data import Dataset, DataLoader
from PIL import Image
from torchvision import transforms
import torch
import torch.nn as nn
import torch.optim as optim
import time
import matplotlib.pyplot as plt
from PIL import Image, UnidentifiedImageError # Import UnidentifiedImageError
In [9]:
class YelpPhotoDataset(Dataset):
def __init__(self, photos_folder, image_size=64, max_samples=None):
self.photos_folder = photos_folder
self.image_size = image_size
self.image_files = [f for f in os.listdir(photos_folder) if f.endswith('.jpg')] # Only jpg images
if max_samples:
self.image_files = self.image_files[:max_samples] # Limit number of samples
def __getitem__(self, idx):
img_path = os.path.join(self.photos_folder, self.image_files[idx])
try:
image = Image.open(img_path).convert('RGB') # Open image
image = image.resize((self.image_size, self.image_size)) # Resize the image
image = transforms.ToTensor()(image) # Convert to tensor
return image
except (UnidentifiedImageError, IOError) as e:
# Catch errors related to invalid or unreadable images
print(f"Error loading image {img_path}: {e}")
return None # Return None if there's an error loading the image
def __len__(self):
return len(self.image_files)
# Define photos folder path
photos_folder = 'C:/Users/singh/Downloads/yelp_dataset/photos' # Replace with your correct path
# Custom collate function to filter out None values from batches
def collate_fn(batch):
batch = [item for item in batch if item is not None] # Remove None values
return torch.stack(batch, 0) if len(batch) > 0 else None # Only stack if there's valid data
# Create Dataset and DataLoader
dataset = YelpPhotoDataset(photos_folder, image_size=64, max_samples=50000) # Limit to 50000 images for testing
dataloader = DataLoader(dataset, batch_size=64, shuffle=True, num_workers=0, collate_fn=collate_fn)
# Define the Generator model (as before)
class Generator(nn.Module):
def __init__(self, latent_dim, img_channels, img_size):
super(Generator, self).__init__()
self.init_size = img_size // 4 # Starting size of the image after fully connected layer
self.fc = nn.Sequential(
nn.Linear(latent_dim, 128 * self.init_size ** 2) # Linear layer to convert latent dim to image size
)
self.conv_blocks = nn.Sequential(
nn.BatchNorm2d(128), # Normalize the data to prevent training instability
nn.Upsample(scale_factor=2), # Upsample image
nn.Conv2d(128, 128, 3, stride=1, padding=1), # Convolutional layer
nn.BatchNorm2d(128),
nn.ReLU(inplace=True),
nn.Upsample(scale_factor=2), # Another upsample
nn.Conv2d(128, img_channels, 3, stride=1, padding=1), # Output layer to generate image
nn.Tanh(), # Tanh to scale output between -1 and 1 (standard for GANs)
)
def forward(self, z):
out = self.fc(z)
out = out.view(out.shape[0], 128, self.init_size, self.init_size) # Reshape after FC layer
img = self.conv_blocks(out)
return img
# Define the Critic model (as before)
class Critic(nn.Module):
def __init__(self, img_channels, img_size):
super(Critic, self).__init__()
# Example: img_size = 64 (assuming a 64x64 image)
# Manually calculate output size after each layer
conv1_output = (img_size - 3 + 2*1) // 2 + 1 # Kernel size 3, padding 1, stride 2
conv2_output = (conv1_output - 3 + 2*1) // 2 + 1 # Kernel size 3, padding 1, stride 2
conv3_output = (conv2_output - 3 + 2*1) // 2 + 1 # Kernel size 3, padding 1, stride 2
conv4_output = (conv3_output - 3 + 2*1) // 2 + 1 # Kernel size 3, padding 1, stride 2
# Calculate the flattened size (number of features) after all convolutional layers
flattened_size = conv4_output * conv4_output * 512 # 512 channels after the last Conv layer
# Define the convolutional layers
self.model = nn.Sequential(
nn.Conv2d(img_channels, 64, kernel_size=3, stride=2, padding=1),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(128, 256, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(256, 512, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.2, inplace=True)
)
# Fully connected layer with correct input size
self.fc = nn.Linear(flattened_size, 1)
def forward(self, img):
x = self.model(img) # Apply convolutional layers
x = x.view(x.size(0), -1) # Flatten the output (batch_size, num_features)
output = self.fc(x) # Pass through the fully connected layer
return output
from PIL import Image, UnidentifiedImageError
In [ ]:
# Hyperparameters
latent_dim = 100 # Latent vector dimension
img_size = 64 # Size of generated images
img_channels = 3 # Number of image channels (RGB)
epochs = 20 # Number of epochs for training
batch_size = 64 # Batch size for training
lambda_gp = 10 # Gradient penalty weight (used for Wasserstein loss regularization)
# Initialize models and optimizers
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
generator = Generator(latent_dim, img_channels, img_size).to(device)
critic = Critic(img_channels, img_size).to(device)
lr = 0.0002 # Learning rate
optimizer_G = optim.Adam(generator.parameters(), lr=lr, betas=(0.5, 0.999)) # Optimizer for Generator
optimizer_C = optim.Adam(critic.parameters(), lr=lr, betas=(0.5, 0.999)) # Optimizer for Critic
# Gradient Penalty Calculation
def compute_gradient_penalty(critic, real_samples, fake_samples):
alpha = torch.randn((real_samples.size(0), 1, 1, 1), device=real_samples.device)
interpolates = (alpha * real_samples + (1 - alpha) * fake_samples).requires_grad_(True)
d_interpolates = critic(interpolates)
fake = torch.ones(d_interpolates.size(), requires_grad=False, device=real_samples.device)
gradients = torch.autograd.grad(
outputs=d_interpolates,
inputs=interpolates,
grad_outputs=fake,
create_graph=True,
retain_graph=True,
only_inputs=True,
)[0]
gradients = gradients.view(gradients.size(0), -1)
gradient_penalty = ((gradients.norm(2, dim=1) - 1) ** 2).mean()
return gradient_penalty
# Safe image loading function
def safe_load_image(image_path):
try:
# Try loading the image
return Image.open(image_path)
except (UnidentifiedImageError, IOError) as e:
# Catch errors related to invalid or unreadable images
print(f"Error loading image {image_path}: {str(e)}")
return None # Return None if the image can't be loaded
# Training Loop
gen_losses = []
critic_losses = []
for epoch in range(epochs):
print(f"Epoch {epoch + 1}/{epochs} start")
for i, imgs in enumerate(dataloader):
if imgs is None: # Skip batches with no valid images
continue
batch_size = imgs.size(0) # Get the current batch size (could be less than the expected batch size)
imgs = imgs.to(device) # Move batch to GPU
# Train Critic
optimizer_C.zero_grad()
z = torch.randn(batch_size, latent_dim, device=device) # Generate random latent vector with the same batch size
fake_imgs = generator(z) # Generate fake images
critic_real = critic(imgs) # Real image critic score
critic_fake = critic(fake_imgs.detach()) # Fake image critic score
gradient_penalty = compute_gradient_penalty(critic, imgs, fake_imgs)
critic_loss = critic_fake.mean() - critic_real.mean() + lambda_gp * gradient_penalty
critic_loss.backward()
optimizer_C.step()
# Train Generator every 5th step
if i % 5 == 0:
optimizer_G.zero_grad()
fake_imgs = generator(z) # Generate fake images
critic_fake = critic(fake_imgs) # Fake image critic score
gen_loss = -critic_fake.mean() # Generator loss (Wasserstein GAN)
gen_loss.backward()
optimizer_G.step()
# Track losses for later analysis
gen_losses.append(gen_loss.item())
critic_losses.append(critic_loss.item())
print(f"Epoch {epoch + 1} finished")
# Plot the losses
plt.plot(gen_losses, label='Generator Loss')
plt.plot(critic_losses, label='Critic Loss')
plt.legend()
plt.show()
# Save the trained models and optimizers
torch.save({
'epoch': epoch,
'generator_state_dict': generator.state_dict(),
'critic_state_dict': critic.state_dict(),
'optimizer_G_state_dict': optimizer_G.state_dict(),
'optimizer_C_state_dict': optimizer_C.state_dict(),
'losses': (gen_losses, critic_losses)
}, 'wgan_model_v_initial.pth')
Epoch 1/20 start Epoch 1 finished Epoch 2/20 start Epoch 2 finished Epoch 3/20 start Epoch 3 finished Epoch 4/20 start Epoch 4 finished Epoch 5/20 start Epoch 5 finished Epoch 6/20 start Epoch 6 finished Epoch 7/20 start Epoch 7 finished Epoch 8/20 start Epoch 8 finished Epoch 9/20 start Epoch 9 finished Epoch 10/20 start Epoch 10 finished Epoch 11/20 start Epoch 11 finished Epoch 12/20 start Epoch 12 finished Epoch 13/20 start Epoch 13 finished Epoch 14/20 start Epoch 14 finished Epoch 15/20 start Epoch 15 finished Epoch 16/20 start Epoch 16 finished Epoch 17/20 start Epoch 17 finished Epoch 18/20 start Epoch 18 finished Epoch 19/20 start Epoch 19 finished Epoch 20/20 start Epoch 20 finished
Run 2- Model running to see Image generation¶
In [4]:
import torchvision
import pandas as pd
import os
from torch.utils.data import Dataset, DataLoader
from PIL import Image
from torchvision import transforms
import torch
import torch.nn as nn
import torch.optim as optim
import time
import matplotlib.pyplot as plt
from PIL import Image, UnidentifiedImageError # Import UnidentifiedImageError
class YelpPhotoDataset(Dataset):
def __init__(self, photos_folder, image_size=64, max_samples=None):
self.photos_folder = photos_folder
self.image_size = image_size
self.image_files = [f for f in os.listdir(photos_folder) if f.endswith('.jpg')] # Only jpg images
if max_samples:
self.image_files = self.image_files[:max_samples] # Limit number of samples
def __getitem__(self, idx):
img_path = os.path.join(self.photos_folder, self.image_files[idx])
try:
image = Image.open(img_path).convert('RGB') # Open image
image = image.resize((self.image_size, self.image_size)) # Resize the image
image = transforms.ToTensor()(image) # Convert to tensor
return image
except (UnidentifiedImageError, IOError) as e:
# Catch errors related to invalid or unreadable images
print(f"Error loading image {img_path}: {e}")
return None # Return None if there's an error loading the image
def __len__(self):
return len(self.image_files)
# Define photos folder path
photos_folder = 'C:/Users/singh/Downloads/yelp_dataset/photos' # Replace with your correct path
# Custom collate function to filter out None values from batches
def collate_fn(batch):
batch = [item for item in batch if item is not None] # Remove None values
return torch.stack(batch, 0) if len(batch) > 0 else None # Only stack if there's valid data
# Create Dataset and DataLoader
dataset = YelpPhotoDataset(photos_folder, image_size=64, max_samples=500) # Limit to 2000 images for testing
dataloader = DataLoader(dataset, batch_size=64, shuffle=True, num_workers=0, collate_fn=collate_fn)
class Generator(nn.Module):
def __init__(self):
super(Generator, self).__init__()
self.init_size = 64 // 4 # Output size after upscaling
self.fc = nn.Sequential(
nn.Linear(256, 128 * self.init_size ** 2), # Adjust for latent_dim
nn.BatchNorm1d(128 * self.init_size ** 2),
nn.LeakyReLU(0.2, inplace=True),
)
self.conv_blocks = nn.Sequential(
nn.Upsample(scale_factor=2), # 16x16 -> 32x32
nn.Conv2d(128, 128, 3, stride=1, padding=1),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.2, inplace=True),
nn.Upsample(scale_factor=2), # 32x32 -> 64x64
nn.Conv2d(128, 64, 3, stride=1, padding=1),
nn.BatchNorm2d(64),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(64, 3, 3, stride=1, padding=1),
nn.Tanh(), # Output values in range [-1, 1]
)
def forward(self, z):
out = self.fc(z)
out = out.view(out.size(0), 128, self.init_size, self.init_size)
img = self.conv_blocks(out)
return img
# Define the Critic model
class Critic(nn.Module):
def __init__(self, img_channels, img_size):
super(Critic, self).__init__()
# Example: img_size = 64 (assuming a 64x64 image)
# Manually calculate output size after each layer
conv1_output = (img_size - 3 + 2*1) // 2 + 1 # Kernel size 3, padding 1, stride 2
conv2_output = (conv1_output - 3 + 2*1) // 2 + 1 # Kernel size 3, padding 1, stride 2
conv3_output = (conv2_output - 3 + 2*1) // 2 + 1 # Kernel size 3, padding 1, stride 2
conv4_output = (conv3_output - 3 + 2*1) // 2 + 1 # Kernel size 3, padding 1, stride 2
# Calculate the flattened size (number of features) after all convolutional layers
flattened_size = conv4_output * conv4_output * 512 # 512 channels after the last Conv layer
# Define the convolutional layers
self.model = nn.Sequential(
nn.Conv2d(img_channels, 64, kernel_size=3, stride=2, padding=1),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(128, 256, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(256, 512, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.2, inplace=True)
)
# Fully connected layer with correct input size
self.fc = nn.Linear(flattened_size, 1)
def forward(self, img):
x = self.model(img) # Apply convolutional layers
x = x.view(x.size(0), -1) # Flatten the output (batch_size, num_features)
output = self.fc(x) # Pass through the fully connected layer
return output
In [13]:
# Hyperparameters
latent_dim = 256 # Latent vector dimension
img_size = 64 # Size of generated images
img_channels = 3 # Number of image channels (RGB)
epochs = 10 # Number of epochs for training
batch_size = 64 # Batch size for training
Experiment Done¶
In [ ]:
lambda_gp = 3 # Gradient penalty weight (used for Wasserstein loss regularization)
# Initialize models and optimizers
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
generator = Generator(latent_dim, img_channels, img_size).to(device)
critic = Critic(img_channels, img_size).to(device)
lr = 0.0001 # Learning rate
lr2= 0.0002 # Learning rate
optimizer_G = optim.Adam(generator.parameters(), lr=lr, betas=(0.5, 0.999)) # Optimizer for Generator
optimizer_C = optim.Adam(critic.parameters(), lr=lr2, betas=(0.5, 0.999)) # Optimizer for Critic
# Gradient Penalty Calculation
def compute_gradient_penalty(critic, real_samples, fake_samples):
alpha = torch.randn((real_samples.size(0), 1, 1, 1), device=real_samples.device)
interpolates = (alpha * real_samples + (1 - alpha) * fake_samples).requires_grad_(True)
d_interpolates = critic(interpolates)
fake = torch.ones(d_interpolates.size(), requires_grad=False, device=real_samples.device)
gradients = torch.autograd.grad(
outputs=d_interpolates,
inputs=interpolates,
grad_outputs=fake,
create_graph=True,
retain_graph=True,
only_inputs=True,
)[0]
gradients = gradients.view(gradients.size(0), -1)
gradient_penalty = ((gradients.norm(2, dim=1) - 1) ** 2).mean()
return gradient_penalty
# Training Loop
gen_losses = []
critic_losses = []
# Save generated images
def save_generated_images(epoch, fixed_z):
with torch.no_grad():
gen_imgs = generator(fixed_z).cpu()
gen_imgs = gen_imgs * 0.5 + 0.5 # Rescale images to [0,1]
grid = torchvision.utils.make_grid(gen_imgs, nrow=4, normalize=True)
plt.imshow(grid.permute(1, 2, 0))
plt.title(f"Generated Images at Epoch {epoch+1}")
plt.axis('off')
plt.show()
# Random vector to visualize the output throughout training
fixed_z = torch.randn(16, latent_dim, device=device)
for epoch in range(epochs):
print(f"Epoch {epoch + 1}/{epochs} start")
for i, imgs in enumerate(dataloader):
if imgs is None: # Skip batches with no valid images
continue
batch_size = imgs.size(0) # Get the current batch size (could be less than the expected batch size)
imgs = imgs.to(device) # Move batch to GPU
# Train Critic
optimizer_C.zero_grad()
z = torch.randn(batch_size, latent_dim, device=device) # Generate random latent vector with the same batch size
fake_imgs = generator(z) # Generate fake images
critic_real = critic(imgs) # Real image critic score
critic_fake = critic(fake_imgs.detach()) # Fake image critic score
gradient_penalty = compute_gradient_penalty(critic, imgs, fake_imgs)
critic_loss = critic_fake.mean() - critic_real.mean() + lambda_gp * gradient_penalty
critic_loss.backward()
optimizer_C.step()
# Train Generator every 5th step
if i % 1 == 0:
optimizer_G.zero_grad()
fake_imgs = generator(z) # Generate fake images
critic_fake = critic(fake_imgs) # Fake image critic score
gen_loss = -critic_fake.mean() # Generator loss (Wasserstein GAN)
gen_loss.backward()
optimizer_G.step()
print(f"Batch {i + 1}/{len(dataloader)}, Critic Loss: {critic_loss.item():.4f}, Generator Loss: {gen_loss.item():.4f}")
# Track losses for later analysis
gen_losses.append(gen_loss.item())
critic_losses.append(critic_loss.item())
# Save images periodically or at the end
save_generated_images(epoch, fixed_z)
# Plot the losses
plt.plot(gen_losses, label='Generator Loss')
plt.plot(critic_losses, label='Critic Loss')
plt.legend()
plt.show()
Epoch 1/10 start Batch 1/782, Critic Loss: -0.1545, Generator Loss: 0.7623 Batch 2/782, Critic Loss: -1.2868, Generator Loss: 0.5026 Batch 3/782, Critic Loss: -2.4178, Generator Loss: 0.4462 Batch 4/782, Critic Loss: -3.1526, Generator Loss: 0.3766 Batch 5/782, Critic Loss: -4.3697, Generator Loss: 0.3498 Batch 6/782, Critic Loss: -5.5110, Generator Loss: 0.8076 Batch 7/782, Critic Loss: -6.2281, Generator Loss: 1.6307 Batch 8/782, Critic Loss: -8.1086, Generator Loss: 3.2932 Batch 9/782, Critic Loss: -10.2837, Generator Loss: 4.9312 Batch 10/782, Critic Loss: -10.3419, Generator Loss: 5.7903 Batch 11/782, Critic Loss: -12.2166, Generator Loss: 8.2402 Batch 12/782, Critic Loss: -13.8045, Generator Loss: 8.7484 Batch 13/782, Critic Loss: -17.3800, Generator Loss: 11.1429 Batch 14/782, Critic Loss: -20.6543, Generator Loss: 13.9762 Batch 15/782, Critic Loss: -22.7188, Generator Loss: 12.3415 Batch 16/782, Critic Loss: -24.1415, Generator Loss: 16.7590 Batch 17/782, Critic Loss: -28.5478, Generator Loss: 18.4589 Batch 18/782, Critic Loss: -29.5567, Generator Loss: 15.8138 Batch 19/782, Critic Loss: -30.5183, Generator Loss: 21.9670 Batch 20/782, Critic Loss: -36.0543, Generator Loss: 22.0617 Batch 21/782, Critic Loss: -41.7139, Generator Loss: 23.5980 Batch 22/782, Critic Loss: -45.1147, Generator Loss: 25.7606 Batch 23/782, Critic Loss: -44.6499, Generator Loss: 27.4822 Batch 24/782, Critic Loss: -48.5648, Generator Loss: 27.4981 Batch 25/782, Critic Loss: -50.2602, Generator Loss: 28.0843 Batch 26/782, Critic Loss: -40.8256, Generator Loss: 32.4437 Batch 27/782, Critic Loss: -27.7323, Generator Loss: 30.1198 Batch 28/782, Critic Loss: -49.5263, Generator Loss: 12.3010 Batch 29/782, Critic Loss: -29.0586, Generator Loss: 32.8033 Batch 30/782, Critic Loss: -47.7676, Generator Loss: 32.9972 Batch 31/782, Critic Loss: -54.1583, Generator Loss: 29.6562 Batch 32/782, Critic Loss: -57.2578, Generator Loss: 34.3643 Batch 33/782, Critic Loss: -51.5932, Generator Loss: 29.2199 Batch 34/782, Critic Loss: -51.6801, Generator Loss: 36.3844 Batch 35/782, Critic Loss: -57.6863, Generator Loss: 27.9945 Batch 36/782, Critic Loss: -37.8771, Generator Loss: 32.4102 Batch 37/782, Critic Loss: -39.6279, Generator Loss: 29.1639 Batch 38/782, Critic Loss: -57.0816, Generator Loss: 26.8826 Batch 39/782, Critic Loss: -53.6330, Generator Loss: 36.8155 Batch 40/782, Critic Loss: -66.7934, Generator Loss: 35.1652 Batch 41/782, Critic Loss: -68.9575, Generator Loss: 34.2166 Batch 42/782, Critic Loss: -74.6962, Generator Loss: 38.2650 Batch 43/782, Critic Loss: -81.5916, Generator Loss: 42.7807 Batch 44/782, Critic Loss: -85.8359, Generator Loss: 44.5332 Batch 45/782, Critic Loss: -88.0522, Generator Loss: 46.9383 Batch 46/782, Critic Loss: -82.0628, Generator Loss: 47.8628 Batch 47/782, Critic Loss: -90.0620, Generator Loss: 40.3127 Batch 48/782, Critic Loss: -87.1211, Generator Loss: 50.0714 Batch 49/782, Critic Loss: -98.0607, Generator Loss: 50.3645 Batch 50/782, Critic Loss: -96.4545, Generator Loss: 52.7994 Batch 51/782, Critic Loss: -95.0799, Generator Loss: 47.3200 Batch 52/782, Critic Loss: -97.6695, Generator Loss: 56.8591 Batch 53/782, Critic Loss: -96.6458, Generator Loss: 50.3470 Batch 54/782, Critic Loss: -99.9136, Generator Loss: 57.2553 Batch 55/782, Critic Loss: -108.4014, Generator Loss: 59.4747 Batch 56/782, Critic Loss: -108.4652, Generator Loss: 55.2435 Batch 57/782, Critic Loss: -116.1821, Generator Loss: 61.1498 Batch 58/782, Critic Loss: -115.5505, Generator Loss: 64.9152 Batch 59/782, Critic Loss: -101.2800, Generator Loss: 52.1304 Batch 60/782, Critic Loss: -100.7405, Generator Loss: 65.1555 Batch 61/782, Critic Loss: -119.1761, Generator Loss: 67.7555 Batch 62/782, Critic Loss: -118.7084, Generator Loss: 33.7476 Batch 63/782, Critic Loss: 104.3743, Generator Loss: 68.7875 Batch 64/782, Critic Loss: -4.6913, Generator Loss: 68.5889 Batch 65/782, Critic Loss: -6.6850, Generator Loss: 68.1610 Batch 66/782, Critic Loss: -5.9890, Generator Loss: 67.8009 Batch 67/782, Critic Loss: -6.1730, Generator Loss: 67.1753 Batch 68/782, Critic Loss: -8.4756, Generator Loss: 66.7974 Batch 69/782, Critic Loss: -7.3881, Generator Loss: 65.8934 Batch 70/782, Critic Loss: -9.4112, Generator Loss: 65.0549 Batch 71/782, Critic Loss: -10.7900, Generator Loss: 64.9932 Batch 72/782, Critic Loss: -10.6497, Generator Loss: 64.4703 Batch 73/782, Critic Loss: -11.5010, Generator Loss: 64.1363 Batch 74/782, Critic Loss: -9.7996, Generator Loss: 63.9453 Batch 75/782, Critic Loss: -9.3568, Generator Loss: 63.4329 Batch 76/782, Critic Loss: -10.6575, Generator Loss: 63.2067 Batch 77/782, Critic Loss: -11.4010, Generator Loss: 63.4607 Batch 78/782, Critic Loss: -11.2797, Generator Loss: 63.3856 Batch 79/782, Critic Loss: -10.5104, Generator Loss: 62.0886 Batch 80/782, Critic Loss: -10.7137, Generator Loss: 61.2741 Batch 81/782, Critic Loss: -11.1660, Generator Loss: 60.9077 Batch 82/782, Critic Loss: -11.9232, Generator Loss: 61.3048 Batch 83/782, Critic Loss: -10.7263, Generator Loss: 60.5139 Batch 84/782, Critic Loss: -12.0552, Generator Loss: 60.1307 Batch 85/782, Critic Loss: -7.6001, Generator Loss: 58.6983 Batch 86/782, Critic Loss: -8.5322, Generator Loss: 57.8967 Batch 87/782, Critic Loss: -6.6436, Generator Loss: 56.6356 Batch 88/782, Critic Loss: -8.7891, Generator Loss: 59.0498 Batch 89/782, Critic Loss: -8.6286, Generator Loss: 56.1244 Batch 90/782, Critic Loss: -7.6057, Generator Loss: 56.5847 Batch 91/782, Critic Loss: -8.5224, Generator Loss: 56.2255 Batch 92/782, Critic Loss: -9.5731, Generator Loss: 56.3759 Batch 93/782, Critic Loss: -7.5170, Generator Loss: 55.1616 Batch 94/782, Critic Loss: -9.0609, Generator Loss: 55.7327 Batch 95/782, Critic Loss: -9.8099, Generator Loss: 55.6609 Batch 96/782, Critic Loss: -11.7388, Generator Loss: 56.8138 Batch 97/782, Critic Loss: -10.9138, Generator Loss: 58.3465 Batch 98/782, Critic Loss: -9.9857, Generator Loss: 55.4156 Batch 99/782, Critic Loss: -10.4957, Generator Loss: 57.2742 Batch 100/782, Critic Loss: -12.2621, Generator Loss: 55.9036 Batch 101/782, Critic Loss: -10.8721, Generator Loss: 56.9277 Batch 102/782, Critic Loss: -13.5196, Generator Loss: 57.4731 Batch 103/782, Critic Loss: -10.3074, Generator Loss: 56.2908 Batch 104/782, Critic Loss: -12.1604, Generator Loss: 58.1558 Batch 105/782, Critic Loss: -14.1963, Generator Loss: 58.3510 Batch 106/782, Critic Loss: -9.7848, Generator Loss: 57.8430 Batch 107/782, Critic Loss: -14.3394, Generator Loss: 59.9763 Batch 108/782, Critic Loss: -16.8005, Generator Loss: 60.4116 Batch 109/782, Critic Loss: -18.5216, Generator Loss: 61.2966 Batch 110/782, Critic Loss: -15.1921, Generator Loss: 59.1503 Batch 111/782, Critic Loss: -18.5220, Generator Loss: 62.6151 Batch 112/782, Critic Loss: -22.8234, Generator Loss: 62.0894 Batch 113/782, Critic Loss: -25.3256, Generator Loss: 63.8628 Batch 114/782, Critic Loss: -26.8389, Generator Loss: 64.5069 Batch 115/782, Critic Loss: -31.7257, Generator Loss: 66.0417 Batch 116/782, Critic Loss: -35.8104, Generator Loss: 67.2349 Batch 117/782, Critic Loss: -37.5463, Generator Loss: 67.7168 Batch 118/782, Critic Loss: -42.4294, Generator Loss: 68.4548 Batch 119/782, Critic Loss: -48.0730, Generator Loss: 69.8506 Batch 120/782, Critic Loss: -52.6629, Generator Loss: 70.1785 Batch 121/782, Critic Loss: -57.0899, Generator Loss: 70.6718 Batch 122/782, Critic Loss: -59.8629, Generator Loss: 71.3462 Batch 123/782, Critic Loss: -56.7328, Generator Loss: 71.4224 Batch 124/782, Critic Loss: -64.9946, Generator Loss: 71.5469 Batch 125/782, Critic Loss: -61.1288, Generator Loss: 65.8577 Batch 126/782, Critic Loss: -45.1769, Generator Loss: 70.3743 Batch 127/782, Critic Loss: -28.0992, Generator Loss: 72.4131 Batch 128/782, Critic Loss: -54.3868, Generator Loss: 64.2652 Batch 129/782, Critic Loss: -66.2827, Generator Loss: 67.3609 Batch 130/782, Critic Loss: -69.8692, Generator Loss: 45.7124 Batch 131/782, Critic Loss: -41.6230, Generator Loss: 78.8186 Batch 132/782, Critic Loss: -62.8745, Generator Loss: 64.8074 Batch 133/782, Critic Loss: -80.4049, Generator Loss: 69.8267 Batch 134/782, Critic Loss: -79.8645, Generator Loss: 55.1703 Batch 135/782, Critic Loss: -84.2433, Generator Loss: 78.3554 Batch 136/782, Critic Loss: -58.9313, Generator Loss: 48.6398 Batch 137/782, Critic Loss: -76.0514, Generator Loss: 74.8044 Batch 138/782, Critic Loss: -66.6368, Generator Loss: 49.4207 Batch 139/782, Critic Loss: -68.5877, Generator Loss: 79.6815 Batch 140/782, Critic Loss: -83.5575, Generator Loss: 42.3322 Batch 141/782, Critic Loss: -60.5420, Generator Loss: 80.2994 Batch 142/782, Critic Loss: -51.8563, Generator Loss: 58.3308 Batch 143/782, Critic Loss: -105.4016, Generator Loss: 61.8031 Batch 144/782, Critic Loss: -83.0136, Generator Loss: 75.4355 Batch 145/782, Critic Loss: -93.6119, Generator Loss: -20.5608 Batch 146/782, Critic Loss: -39.7544, Generator Loss: 71.0523 Batch 147/782, Critic Loss: -112.9547, Generator Loss: 84.8102 Batch 148/782, Critic Loss: -87.4445, Generator Loss: -22.6627 Batch 149/782, Critic Loss: -28.8851, Generator Loss: 38.4531 Batch 150/782, Critic Loss: -53.5606, Generator Loss: 85.9036 Batch 151/782, Critic Loss: -55.7038, Generator Loss: 43.9417 Batch 152/782, Critic Loss: -69.5852, Generator Loss: 76.1332 Batch 153/782, Critic Loss: -65.9373, Generator Loss: 54.6224 Batch 154/782, Critic Loss: -94.7319, Generator Loss: 42.6095 Batch 155/782, Critic Loss: -59.9664, Generator Loss: 85.1431 Batch 156/782, Critic Loss: -74.6509, Generator Loss: -18.2882 Batch 157/782, Critic Loss: -42.7883, Generator Loss: 73.9126 Batch 158/782, Critic Loss: -51.5402, Generator Loss: 77.0462 Batch 159/782, Critic Loss: -68.1429, Generator Loss: 8.2591 Batch 160/782, Critic Loss: -49.9775, Generator Loss: 89.6462 Batch 161/782, Critic Loss: -40.1728, Generator Loss: 73.3157 Batch 162/782, Critic Loss: -88.8612, Generator Loss: -16.4385 Batch 163/782, Critic Loss: -43.6792, Generator Loss: 75.7531 Batch 164/782, Critic Loss: -52.6240, Generator Loss: 68.9833 Batch 165/782, Critic Loss: -77.9286, Generator Loss: 11.9699 Batch 166/782, Critic Loss: -79.4656, Generator Loss: 82.9131 Batch 167/782, Critic Loss: -58.3006, Generator Loss: 75.6235 Batch 168/782, Critic Loss: -98.1159, Generator Loss: -48.3698 Batch 169/782, Critic Loss: -16.1591, Generator Loss: -23.6606 Batch 170/782, Critic Loss: -32.8692, Generator Loss: -5.2642 Batch 171/782, Critic Loss: -25.6606, Generator Loss: 49.9365 Batch 172/782, Critic Loss: -64.7726, Generator Loss: 67.7892 Batch 173/782, Critic Loss: -43.6619, Generator Loss: 60.8716 Batch 174/782, Critic Loss: -59.3094, Generator Loss: 56.4938 Batch 175/782, Critic Loss: -90.5956, Generator Loss: 47.5551 Batch 176/782, Critic Loss: -73.9184, Generator Loss: 89.1105 Batch 177/782, Critic Loss: -67.3189, Generator Loss: -60.5225 Batch 178/782, Critic Loss: -21.9349, Generator Loss: 46.1559 Batch 179/782, Critic Loss: -83.7698, Generator Loss: 83.4806 Batch 180/782, Critic Loss: -56.6289, Generator Loss: 34.6045 Batch 181/782, Critic Loss: -58.3563, Generator Loss: 79.4370 Batch 182/782, Critic Loss: -89.9386, Generator Loss: 3.7044 Batch 183/782, Critic Loss: -67.0624, Generator Loss: 104.3116 Batch 184/782, Critic Loss: -25.3364, Generator Loss: 101.5489 Batch 185/782, Critic Loss: -24.9638, Generator Loss: 73.2511 Batch 186/782, Critic Loss: -47.8380, Generator Loss: 45.9533 Batch 187/782, Critic Loss: -54.6000, Generator Loss: 30.5357 Batch 188/782, Critic Loss: -88.5184, Generator Loss: 102.0719 Batch 189/782, Critic Loss: -32.3987, Generator Loss: 76.8185 Batch 190/782, Critic Loss: -41.8620, Generator Loss: 73.2098 Batch 191/782, Critic Loss: -74.3228, Generator Loss: 49.2852 Batch 192/782, Critic Loss: -80.8543, Generator Loss: 84.7001 Batch 193/782, Critic Loss: -88.7602, Generator Loss: -20.9210 Batch 194/782, Critic Loss: -14.8540, Generator Loss: 107.0849 Batch 195/782, Critic Loss: -53.1562, Generator Loss: 93.9883 Batch 196/782, Critic Loss: -88.0556, Generator Loss: -5.0516 Batch 197/782, Critic Loss: -38.5629, Generator Loss: 42.1169 Batch 198/782, Critic Loss: -31.6592, Generator Loss: 113.8461 Batch 199/782, Critic Loss: -34.6183, Generator Loss: 95.3346 Batch 200/782, Critic Loss: -66.4094, Generator Loss: 65.1052 Batch 201/782, Critic Loss: -78.1255, Generator Loss: 94.6133 Batch 202/782, Critic Loss: -98.4661, Generator Loss: 29.1580 Batch 203/782, Critic Loss: -44.4663, Generator Loss: 97.6002 Batch 204/782, Critic Loss: -32.9267, Generator Loss: 88.1009 Batch 205/782, Critic Loss: -103.4030, Generator Loss: 54.9985 Batch 206/782, Critic Loss: -69.1150, Generator Loss: 121.7305 Batch 207/782, Critic Loss: -12.5892, Generator Loss: 120.7758 Batch 208/782, Critic Loss: -6.2792, Generator Loss: 120.3104 Batch 209/782, Critic Loss: -5.6558, Generator Loss: 119.4658 Batch 210/782, Critic Loss: -5.3076, Generator Loss: 119.4778 Batch 211/782, Critic Loss: -5.2965, Generator Loss: 118.3484 Batch 212/782, Critic Loss: -7.4614, Generator Loss: 117.5604 Batch 213/782, Critic Loss: -7.7907, Generator Loss: 117.2520 Batch 214/782, Critic Loss: -9.5647, Generator Loss: 115.2103 Batch 215/782, Critic Loss: -9.0786, Generator Loss: 112.5268 Batch 216/782, Critic Loss: -10.6167, Generator Loss: 110.5318 Batch 217/782, Critic Loss: -8.7485, Generator Loss: 109.2906 Batch 218/782, Critic Loss: -15.1557, Generator Loss: 108.3020 Batch 219/782, Critic Loss: -7.6453, Generator Loss: 105.9376 Batch 220/782, Critic Loss: -16.8340, Generator Loss: 106.1250 Batch 221/782, Critic Loss: -9.1758, Generator Loss: 104.9426 Batch 222/782, Critic Loss: -11.0093, Generator Loss: 106.5619 Batch 223/782, Critic Loss: -18.7394, Generator Loss: 104.6631 Batch 224/782, Critic Loss: -12.8472, Generator Loss: 101.2984 Batch 225/782, Critic Loss: -24.1429, Generator Loss: 98.1194 Batch 226/782, Critic Loss: -27.5847, Generator Loss: 82.8000 Batch 227/782, Critic Loss: -31.0936, Generator Loss: 96.8028 Batch 228/782, Critic Loss: -33.2409, Generator Loss: 96.8867 Batch 229/782, Critic Loss: -42.4162, Generator Loss: 67.8805 Batch 230/782, Critic Loss: -62.1202, Generator Loss: 81.4190 Batch 231/782, Critic Loss: -61.0849, Generator Loss: 9.1369 Batch 232/782, Critic Loss: -49.8779, Generator Loss: 91.4436 Batch 233/782, Critic Loss: -71.9660, Generator Loss: 57.9287 Batch 234/782, Critic Loss: -51.5273, Generator Loss: 98.6487 Batch 235/782, Critic Loss: -49.2584, Generator Loss: 63.6840 Batch 236/782, Critic Loss: -71.3164, Generator Loss: 66.7762 Batch 237/782, Critic Loss: -77.9536, Generator Loss: 86.7961 Batch 238/782, Critic Loss: -96.6474, Generator Loss: -69.9094 Batch 239/782, Critic Loss: -15.8451, Generator Loss: 20.6338 Batch 240/782, Critic Loss: -64.1349, Generator Loss: 119.7038 Batch 241/782, Critic Loss: -30.2998, Generator Loss: 106.0682 Batch 242/782, Critic Loss: -64.5663, Generator Loss: 5.9546 Batch 243/782, Critic Loss: -31.5199, Generator Loss: 59.4814 Batch 244/782, Critic Loss: 8.3139, Generator Loss: 46.4114 Batch 245/782, Critic Loss: 48.9886, Generator Loss: 116.0885 Batch 246/782, Critic Loss: -24.2199, Generator Loss: 109.8796 Batch 247/782, Critic Loss: -50.4658, Generator Loss: 90.9523 Batch 248/782, Critic Loss: -107.0538, Generator Loss: 93.0986 Batch 249/782, Critic Loss: -40.6987, Generator Loss: 85.2600 Batch 250/782, Critic Loss: -5.0003, Generator Loss: 91.0940 Batch 251/782, Critic Loss: -22.1678, Generator Loss: 107.2722 Batch 252/782, Critic Loss: -46.3753, Generator Loss: 105.7838 Batch 253/782, Critic Loss: -61.0981, Generator Loss: 98.5709 Batch 254/782, Critic Loss: -31.3114, Generator Loss: 81.7667 Batch 255/782, Critic Loss: -39.3318, Generator Loss: 65.4896 Batch 256/782, Critic Loss: -77.5939, Generator Loss: 94.6863 Batch 257/782, Critic Loss: -81.3736, Generator Loss: -30.3876 Batch 258/782, Critic Loss: -26.5691, Generator Loss: 65.1694 Batch 259/782, Critic Loss: -40.9770, Generator Loss: 71.8157 Batch 260/782, Critic Loss: -94.0779, Generator Loss: 25.8667 Batch 261/782, Critic Loss: -10.2276, Generator Loss: 119.5760 Batch 262/782, Critic Loss: -35.0954, Generator Loss: 94.2171 Batch 263/782, Critic Loss: -46.0813, Generator Loss: 11.7157 Batch 264/782, Critic Loss: -36.0994, Generator Loss: 109.8704 Batch 265/782, Critic Loss: -78.4489, Generator Loss: 70.4849 Batch 266/782, Critic Loss: -57.1065, Generator Loss: 125.8947 Batch 267/782, Critic Loss: -69.8573, Generator Loss: 36.2805 Batch 268/782, Critic Loss: -45.7966, Generator Loss: 44.0302 Batch 269/782, Critic Loss: -40.0267, Generator Loss: 96.7012 Batch 270/782, Critic Loss: -66.5895, Generator Loss: 55.7533 Batch 271/782, Critic Loss: -51.4965, Generator Loss: 109.6469 Batch 272/782, Critic Loss: -85.1989, Generator Loss: 0.8725 Batch 273/782, Critic Loss: -76.2790, Generator Loss: 129.7834 Batch 274/782, Critic Loss: -36.7595, Generator Loss: 121.7518 Batch 275/782, Critic Loss: -68.9615, Generator Loss: 43.5635 Batch 276/782, Critic Loss: -17.9542, Generator Loss: 81.4328 Batch 277/782, Critic Loss: -79.0249, Generator Loss: 89.8050 Batch 278/782, Critic Loss: -70.7955, Generator Loss: 71.4120 Batch 279/782, Critic Loss: -129.6429, Generator Loss: 118.8025 Batch 280/782, Critic Loss: -132.3864, Generator Loss: -83.4127 Batch 281/782, Critic Loss: -10.7367, Generator Loss: -27.6920 Batch 282/782, Critic Loss: -25.8605, Generator Loss: 39.6050 Batch 283/782, Critic Loss: -30.2481, Generator Loss: 100.5947 Batch 284/782, Critic Loss: -52.5947, Generator Loss: 103.5963 Batch 285/782, Critic Loss: -13.3525, Generator Loss: 98.3035 Batch 286/782, Critic Loss: -92.7803, Generator Loss: 91.4999 Batch 287/782, Critic Loss: -84.8916, Generator Loss: 88.9749 Batch 288/782, Critic Loss: -55.5283, Generator Loss: 106.5593 Batch 289/782, Critic Loss: -87.2003, Generator Loss: 3.4665 Batch 290/782, Critic Loss: -8.4940, Generator Loss: -73.2060 Batch 291/782, Critic Loss: 10.0084, Generator Loss: -33.0938 Batch 292/782, Critic Loss: 10.1089, Generator Loss: -27.6194 Batch 293/782, Critic Loss: 44.4952, Generator Loss: -25.8134 Batch 294/782, Critic Loss: 2.0079, Generator Loss: -5.2633 Batch 295/782, Critic Loss: -21.4443, Generator Loss: 16.9577 Batch 296/782, Critic Loss: -41.9592, Generator Loss: 69.1726 Batch 297/782, Critic Loss: -76.1966, Generator Loss: 82.7355 Batch 298/782, Critic Loss: -77.7074, Generator Loss: 16.9450 Batch 299/782, Critic Loss: -68.4202, Generator Loss: 125.5281 Batch 300/782, Critic Loss: -34.4304, Generator Loss: 103.5339 Batch 301/782, Critic Loss: -48.3012, Generator Loss: 17.8039 Batch 302/782, Critic Loss: -46.0182, Generator Loss: 35.5023 Batch 303/782, Critic Loss: -63.4363, Generator Loss: 125.4350 Batch 304/782, Critic Loss: -63.5290, Generator Loss: -11.2710 Batch 305/782, Critic Loss: -37.5468, Generator Loss: 72.9973 Batch 306/782, Critic Loss: -72.6030, Generator Loss: 90.1008 Batch 307/782, Critic Loss: -74.9333, Generator Loss: 44.8946 Batch 308/782, Critic Loss: -53.8770, Generator Loss: 135.1938 Batch 309/782, Critic Loss: -22.9456, Generator Loss: 131.6194 Batch 310/782, Critic Loss: -25.0096, Generator Loss: 110.7201 Batch 311/782, Critic Loss: -44.4795, Generator Loss: 62.4736 Batch 312/782, Critic Loss: -46.4170, Generator Loss: 93.1647 Batch 313/782, Critic Loss: -82.5099, Generator Loss: 22.3035 Batch 314/782, Critic Loss: -52.8689, Generator Loss: 138.1614 Batch 315/782, Critic Loss: -28.9032, Generator Loss: 136.2988 Batch 316/782, Critic Loss: -39.9559, Generator Loss: 109.2635 Batch 317/782, Critic Loss: -85.7209, Generator Loss: -22.3154 Batch 318/782, Critic Loss: -13.2030, Generator Loss: 142.1921 Batch 319/782, Critic Loss: -39.0138, Generator Loss: 121.1801 Batch 320/782, Critic Loss: -56.3717, Generator Loss: 53.3553 Batch 321/782, Critic Loss: 11.0756, Generator Loss: 123.7486 Batch 322/782, Critic Loss: -85.6922, Generator Loss: 99.1296 Batch 323/782, Critic Loss: -105.0180, Generator Loss: -53.6725 Batch 324/782, Critic Loss: -35.5471, Generator Loss: 31.1613 Batch 325/782, Critic Loss: -105.4738, Generator Loss: 141.4845 Batch 326/782, Critic Loss: -68.8915, Generator Loss: 88.8748 Batch 327/782, Critic Loss: -167.1126, Generator Loss: 90.9616 Batch 328/782, Critic Loss: -106.3640, Generator Loss: 112.4792 Batch 329/782, Critic Loss: -133.0217, Generator Loss: 122.9915 Batch 330/782, Critic Loss: -137.7667, Generator Loss: -66.4546 Batch 331/782, Critic Loss: -12.7926, Generator Loss: -27.1139 Batch 332/782, Critic Loss: -3.8231, Generator Loss: -77.0882 Batch 333/782, Critic Loss: -22.7331, Generator Loss: 66.9933 Batch 334/782, Critic Loss: -83.6369, Generator Loss: 117.0425 Batch 335/782, Critic Loss: -107.1346, Generator Loss: 107.9052 Batch 336/782, Critic Loss: -27.6753, Generator Loss: 112.0949 Batch 337/782, Critic Loss: -9.2227, Generator Loss: 145.9532 Batch 338/782, Critic Loss: -53.8647, Generator Loss: 98.1520 Batch 339/782, Critic Loss: -106.2875, Generator Loss: 106.6548 Batch 340/782, Critic Loss: -133.9935, Generator Loss: 152.3964 Batch 341/782, Critic Loss: -77.7771, Generator Loss: 11.7097 Batch 342/782, Critic Loss: -30.9724, Generator Loss: 40.4204 Batch 343/782, Critic Loss: -86.5961, Generator Loss: 135.5197 Batch 344/782, Critic Loss: -58.5696, Generator Loss: -7.7565 Batch 345/782, Critic Loss: -30.5205, Generator Loss: 17.8239 Batch 346/782, Critic Loss: -77.2715, Generator Loss: 143.4211 Batch 347/782, Critic Loss: -52.2561, Generator Loss: 134.3386 Batch 348/782, Critic Loss: -69.9373, Generator Loss: 23.8390 Batch 349/782, Critic Loss: -32.5542, Generator Loss: -1.3640 Batch 350/782, Critic Loss: -53.3480, Generator Loss: 130.6318 Batch 351/782, Critic Loss: -87.1464, Generator Loss: 111.1619 Batch 352/782, Critic Loss: -172.3852, Generator Loss: 121.6755 Batch 353/782, Critic Loss: -229.3705, Generator Loss: 164.2469 Batch 354/782, Critic Loss: -111.3295, Generator Loss: -19.6097 Batch 355/782, Critic Loss: -99.9696, Generator Loss: -6.7866 Batch 356/782, Critic Loss: -93.1267, Generator Loss: 73.1711 Batch 357/782, Critic Loss: -119.8588, Generator Loss: 105.5165 Batch 358/782, Critic Loss: -136.0189, Generator Loss: 123.4842 Batch 359/782, Critic Loss: -202.0711, Generator Loss: 72.9067 Batch 360/782, Critic Loss: -165.8936, Generator Loss: 166.4421 Batch 361/782, Critic Loss: -91.3804, Generator Loss: -64.7559 Batch 362/782, Critic Loss: -22.7952, Generator Loss: -83.4225 Batch 363/782, Critic Loss: -41.6230, Generator Loss: 52.1128 Batch 364/782, Critic Loss: -94.5199, Generator Loss: 130.4518 Batch 365/782, Critic Loss: -181.3401, Generator Loss: 72.8175 Batch 366/782, Critic Loss: -182.3503, Generator Loss: 84.2084 Batch 367/782, Critic Loss: -130.9670, Generator Loss: 178.6998 Batch 368/782, Critic Loss: -33.4813, Generator Loss: 164.5763 Batch 369/782, Critic Loss: -20.7900, Generator Loss: 137.7284 Batch 370/782, Critic Loss: -27.3949, Generator Loss: 123.8555 Batch 371/782, Critic Loss: -43.9667, Generator Loss: 131.8395 Batch 372/782, Critic Loss: -64.2594, Generator Loss: 157.2137 Batch 373/782, Critic Loss: -58.9611, Generator Loss: 40.2337 Batch 374/782, Critic Loss: -40.5809, Generator Loss: 90.6825 Batch 375/782, Critic Loss: -57.4199, Generator Loss: 118.5239 Batch 376/782, Critic Loss: -169.3878, Generator Loss: 158.4813 Batch 377/782, Critic Loss: -152.0993, Generator Loss: -103.7876 Batch 378/782, Critic Loss: 9.6261, Generator Loss: -188.9138 Batch 379/782, Critic Loss: 27.4297, Generator Loss: -189.1819 Batch 380/782, Critic Loss: 49.0202, Generator Loss: -187.8590 Batch 381/782, Critic Loss: 1.4041, Generator Loss: -181.7552 Batch 382/782, Critic Loss: -5.8842, Generator Loss: -161.9952 Batch 383/782, Critic Loss: -16.5410, Generator Loss: -115.4517 Batch 384/782, Critic Loss: -56.7327, Generator Loss: 23.7775 Batch 385/782, Critic Loss: -88.9927, Generator Loss: 84.7618 Batch 386/782, Critic Loss: -154.7577, Generator Loss: 103.9640 Batch 387/782, Critic Loss: -150.4880, Generator Loss: 168.2099 Batch 388/782, Critic Loss: -76.6682, Generator Loss: 119.8313 Batch 389/782, Critic Loss: -98.3019, Generator Loss: 173.6359 Batch 390/782, Critic Loss: -56.7182, Generator Loss: 141.4194 Batch 391/782, Critic Loss: -92.9074, Generator Loss: -39.0532 Batch 392/782, Critic Loss: -70.9439, Generator Loss: 149.0876 Batch 393/782, Critic Loss: -175.1862, Generator Loss: 5.9764 Batch 394/782, Critic Loss: -141.7676, Generator Loss: 181.2167 Batch 395/782, Critic Loss: -160.1719, Generator Loss: 67.3162 Batch 396/782, Critic Loss: -107.0567, Generator Loss: 162.2748 Batch 397/782, Critic Loss: -257.0168, Generator Loss: -48.6286 Batch 398/782, Critic Loss: -36.7360, Generator Loss: -37.7950 Batch 399/782, Critic Loss: -53.7366, Generator Loss: 172.9716 Batch 400/782, Critic Loss: -60.6802, Generator Loss: 159.2015 Batch 401/782, Critic Loss: -84.1205, Generator Loss: 106.2757 Batch 402/782, Critic Loss: -94.5374, Generator Loss: 143.8922 Batch 403/782, Critic Loss: -173.9769, Generator Loss: 121.1051 Batch 404/782, Critic Loss: -190.6348, Generator Loss: 163.5069 Batch 405/782, Critic Loss: -187.9482, Generator Loss: 44.7555 Batch 406/782, Critic Loss: -149.2131, Generator Loss: 148.9985 Batch 407/782, Critic Loss: -54.9735, Generator Loss: 88.4299 Batch 408/782, Critic Loss: -41.3387, Generator Loss: 116.6171 Batch 409/782, Critic Loss: -102.6361, Generator Loss: 150.7156 Batch 410/782, Critic Loss: -129.3990, Generator Loss: -129.3832 Batch 411/782, Critic Loss: -45.3243, Generator Loss: 48.2591 Batch 412/782, Critic Loss: -96.2291, Generator Loss: 194.7424 Batch 413/782, Critic Loss: -36.7711, Generator Loss: 192.4324 Batch 414/782, Critic Loss: -44.1954, Generator Loss: 171.8643 Batch 415/782, Critic Loss: -71.8028, Generator Loss: 115.4134 Batch 416/782, Critic Loss: -98.9324, Generator Loss: 68.6955 Batch 417/782, Critic Loss: -89.8275, Generator Loss: 112.3003 Batch 418/782, Critic Loss: -74.3342, Generator Loss: 138.7957 Batch 419/782, Critic Loss: -137.3121, Generator Loss: 129.2446 Batch 420/782, Critic Loss: -112.7439, Generator Loss: 203.9362 Batch 421/782, Critic Loss: -99.2562, Generator Loss: 110.8849 Batch 422/782, Critic Loss: -114.8773, Generator Loss: 64.4776 Batch 423/782, Critic Loss: -48.3059, Generator Loss: 177.1132 Batch 424/782, Critic Loss: -288.1008, Generator Loss: 153.0712 Batch 425/782, Critic Loss: -114.9317, Generator Loss: 192.7353 Batch 426/782, Critic Loss: -263.2151, Generator Loss: 192.9371 Batch 427/782, Critic Loss: -338.3910, Generator Loss: 136.2551 Batch 428/782, Critic Loss: -31.2867, Generator Loss: 103.3285 Batch 429/782, Critic Loss: -69.5492, Generator Loss: 188.1823 Batch 430/782, Critic Loss: -31.0125, Generator Loss: 193.3002 Batch 431/782, Critic Loss: -159.5917, Generator Loss: -5.8311 Batch 432/782, Critic Loss: -19.8421, Generator Loss: -112.2734 Batch 433/782, Critic Loss: -68.3149, Generator Loss: 57.0086 Batch 434/782, Critic Loss: -180.2988, Generator Loss: 181.8015 Batch 435/782, Critic Loss: -280.0449, Generator Loss: 184.3336 Batch 436/782, Critic Loss: -307.1828, Generator Loss: 208.9545 Batch 437/782, Critic Loss: -381.6313, Generator Loss: 181.9610 Batch 438/782, Critic Loss: -330.0581, Generator Loss: 224.7971 Batch 439/782, Critic Loss: -146.8118, Generator Loss: 17.8101 Batch 440/782, Critic Loss: -147.5154, Generator Loss: 202.3964 Batch 441/782, Critic Loss: -299.6125, Generator Loss: 123.4056 Batch 442/782, Critic Loss: -154.4334, Generator Loss: 199.9967 Batch 443/782, Critic Loss: -66.8107, Generator Loss: 202.5620 Batch 444/782, Critic Loss: -85.4886, Generator Loss: 195.3776 Batch 445/782, Critic Loss: -93.7413, Generator Loss: 175.3445 Batch 446/782, Critic Loss: -90.6933, Generator Loss: 145.3479 Batch 447/782, Critic Loss: -129.7114, Generator Loss: 178.5094 Batch 448/782, Critic Loss: -142.4915, Generator Loss: 24.6270 Batch 449/782, Critic Loss: -104.2494, Generator Loss: 214.9115 Batch 450/782, Critic Loss: -118.6190, Generator Loss: -29.8265 Batch 451/782, Critic Loss: -116.0639, Generator Loss: 213.7484 Batch 452/782, Critic Loss: -130.8515, Generator Loss: 148.1235 Batch 453/782, Critic Loss: -227.9205, Generator Loss: 169.1902 Batch 454/782, Critic Loss: -323.5749, Generator Loss: 210.4112 Batch 455/782, Critic Loss: -357.5991, Generator Loss: 97.1933 Batch 456/782, Critic Loss: -57.4956, Generator Loss: 161.6352 Batch 457/782, Critic Loss: -239.9156, Generator Loss: 199.9915 Batch 458/782, Critic Loss: -155.3649, Generator Loss: 110.6256 Batch 459/782, Critic Loss: -199.1418, Generator Loss: 228.6311 Batch 460/782, Critic Loss: -222.7730, Generator Loss: 95.9529 Batch 461/782, Critic Loss: -63.6475, Generator Loss: -97.6894 Batch 462/782, Critic Loss: -64.8920, Generator Loss: -37.5577 Batch 463/782, Critic Loss: -54.8192, Generator Loss: -5.4759 Batch 464/782, Critic Loss: -71.5079, Generator Loss: 11.5720 Batch 465/782, Critic Loss: -49.8782, Generator Loss: -4.5120 Batch 466/782, Critic Loss: -146.5382, Generator Loss: 123.1599 Batch 467/782, Critic Loss: -223.6118, Generator Loss: 106.6790 Batch 468/782, Critic Loss: -125.9487, Generator Loss: 212.6597 Batch 469/782, Critic Loss: -141.8360, Generator Loss: 118.6035 Batch 470/782, Critic Loss: -108.8646, Generator Loss: 147.2370 Batch 471/782, Critic Loss: -208.5284, Generator Loss: 229.0537 Batch 472/782, Critic Loss: -137.6009, Generator Loss: -99.8226 Batch 473/782, Critic Loss: -60.8313, Generator Loss: 119.9416 Batch 474/782, Critic Loss: -202.6936, Generator Loss: 257.0236 Batch 475/782, Critic Loss: -79.7582, Generator Loss: 225.0358 Batch 476/782, Critic Loss: -174.7721, Generator Loss: -190.9473 Batch 477/782, Critic Loss: -32.5391, Generator Loss: -205.4031 Batch 478/782, Critic Loss: -31.5143, Generator Loss: -154.0388 Batch 479/782, Critic Loss: -40.8922, Generator Loss: -92.6517 Batch 480/782, Critic Loss: -40.2223, Generator Loss: -78.7431 Batch 481/782, Critic Loss: -40.5806, Generator Loss: -53.0071 Batch 482/782, Critic Loss: -19.3115, Generator Loss: -28.1980 Batch 483/782, Critic Loss: -55.4043, Generator Loss: 70.0837 Batch 484/782, Critic Loss: -144.3182, Generator Loss: 184.4132 Batch 485/782, Critic Loss: -225.2959, Generator Loss: 180.2744 Batch 486/782, Critic Loss: -313.3794, Generator Loss: 224.1622 Batch 487/782, Critic Loss: -405.9261, Generator Loss: 224.1611 Batch 488/782, Critic Loss: -406.3588, Generator Loss: 236.1303 Batch 489/782, Critic Loss: -428.2912, Generator Loss: 262.9150 Batch 490/782, Critic Loss: -265.0125, Generator Loss: -173.7543 Batch 491/782, Critic Loss: -29.0721, Generator Loss: -232.3880 Batch 492/782, Critic Loss: -25.3762, Generator Loss: -194.0030 Batch 493/782, Critic Loss: -10.9221, Generator Loss: -137.8557 Batch 494/782, Critic Loss: -55.8562, Generator Loss: -94.9724 Batch 495/782, Critic Loss: -29.6704, Generator Loss: -91.4765 Batch 496/782, Critic Loss: -19.2740, Generator Loss: -82.2172 Batch 497/782, Critic Loss: 3.6387, Generator Loss: -71.2832 Batch 498/782, Critic Loss: -45.4188, Generator Loss: 14.1641 Batch 499/782, Critic Loss: -86.5500, Generator Loss: -75.1497 Batch 500/782, Critic Loss: -27.2967, Generator Loss: -109.2153 Batch 501/782, Critic Loss: 29.5963, Generator Loss: -15.3386 Batch 502/782, Critic Loss: -51.8879, Generator Loss: 172.3991 Batch 503/782, Critic Loss: -146.0543, Generator Loss: 223.0968 Batch 504/782, Critic Loss: -214.5837, Generator Loss: 7.9211 Batch 505/782, Critic Loss: -128.0095, Generator Loss: 202.7195 Batch 506/782, Critic Loss: -185.6181, Generator Loss: 205.7455 Batch 507/782, Critic Loss: -285.8203, Generator Loss: 111.3192 Batch 508/782, Critic Loss: -226.9961, Generator Loss: 243.5677 Batch 509/782, Critic Loss: -202.9954, Generator Loss: -105.1163 Batch 510/782, Critic Loss: -114.3112, Generator Loss: 150.2276 Batch 511/782, Critic Loss: -216.9540, Generator Loss: 240.0209 Batch 512/782, Critic Loss: -211.4448, Generator Loss: -73.6860 Batch 513/782, Critic Loss: -103.5565, Generator Loss: 10.7461 Batch 514/782, Critic Loss: -127.0107, Generator Loss: 143.3685 Batch 515/782, Critic Loss: -198.4611, Generator Loss: 213.7395 Batch 516/782, Critic Loss: -206.6661, Generator Loss: 12.2683 Batch 517/782, Critic Loss: -158.2303, Generator Loss: 274.2209 Batch 518/782, Critic Loss: -123.2770, Generator Loss: 237.1673 Batch 519/782, Critic Loss: -211.3243, Generator Loss: -179.4524 Batch 520/782, Critic Loss: -61.5071, Generator Loss: -194.7623 Batch 521/782, Critic Loss: -43.4793, Generator Loss: -72.8525 Batch 522/782, Critic Loss: -80.5795, Generator Loss: 119.8477 Batch 523/782, Critic Loss: -191.3972, Generator Loss: 264.5702 Batch 524/782, Critic Loss: -242.6855, Generator Loss: 58.3204 Batch 525/782, Critic Loss: -206.7962, Generator Loss: 206.2580 Batch 526/782, Critic Loss: -309.2927, Generator Loss: 192.1051 Batch 527/782, Critic Loss: -279.5981, Generator Loss: 193.1082 Batch 528/782, Critic Loss: -280.4957, Generator Loss: 202.7859 Batch 529/782, Critic Loss: -305.1549, Generator Loss: 215.8969 Batch 530/782, Critic Loss: -302.7429, Generator Loss: 20.2259 Batch 531/782, Critic Loss: -169.5537, Generator Loss: 219.1409 Batch 532/782, Critic Loss: -176.5372, Generator Loss: 129.7943 Batch 533/782, Critic Loss: -312.2497, Generator Loss: 290.0109 Batch 534/782, Critic Loss: -138.4167, Generator Loss: 220.0598 Batch 535/782, Critic Loss: -221.8627, Generator Loss: 11.7022 Batch 536/782, Critic Loss: -194.3809, Generator Loss: 280.4521 Batch 537/782, Critic Loss: -222.9953, Generator Loss: 188.9412 Batch 538/782, Critic Loss: -307.2146, Generator Loss: 117.2868 Batch 539/782, Critic Loss: -259.3457, Generator Loss: 296.0071 Batch 540/782, Critic Loss: -174.7008, Generator Loss: 229.8824 Batch 541/782, Critic Loss: -261.6144, Generator Loss: 75.5110 Batch 542/782, Critic Loss: -178.2133, Generator Loss: 275.0653 Batch 543/782, Critic Loss: -367.8590, Generator Loss: -35.2320 Batch 544/782, Critic Loss: -54.9052, Generator Loss: -233.4323 Batch 545/782, Critic Loss: -35.4588, Generator Loss: -177.1021 Batch 546/782, Critic Loss: -49.6745, Generator Loss: -15.9074 Batch 547/782, Critic Loss: -87.2479, Generator Loss: 160.7426 Batch 548/782, Critic Loss: -192.0152, Generator Loss: 273.6410 Batch 549/782, Critic Loss: -324.9033, Generator Loss: -54.5717 Batch 550/782, Critic Loss: -208.5043, Generator Loss: 213.1502 Batch 551/782, Critic Loss: -312.3528, Generator Loss: 280.8045 Batch 552/782, Critic Loss: -374.0363, Generator Loss: 100.8230 Batch 553/782, Critic Loss: -281.1172, Generator Loss: 315.6834 Batch 554/782, Critic Loss: -182.8819, Generator Loss: 126.9379 Batch 555/782, Critic Loss: -291.5683, Generator Loss: 275.7223 Batch 556/782, Critic Loss: -337.4656, Generator Loss: -21.9341 Batch 557/782, Critic Loss: -205.2764, Generator Loss: 311.7754 Batch 558/782, Critic Loss: -206.0269, Generator Loss: 206.7116 Batch 559/782, Critic Loss: -297.9291, Generator Loss: 142.1171 Batch 560/782, Critic Loss: -180.6922, Generator Loss: 269.5627 Batch 561/782, Critic Loss: -192.1819, Generator Loss: 215.7752 Batch 562/782, Critic Loss: -187.5895, Generator Loss: 158.3757 Batch 563/782, Critic Loss: -248.8107, Generator Loss: 259.0585 Batch 564/782, Critic Loss: -439.6783, Generator Loss: 285.0236 Batch 565/782, Critic Loss: -520.6465, Generator Loss: 289.2095 Batch 566/782, Critic Loss: -515.7629, Generator Loss: 310.4088 Batch 567/782, Critic Loss: -443.1354, Generator Loss: 20.4755 Batch 568/782, Critic Loss: -59.9332, Generator Loss: -166.5372 Batch 569/782, Critic Loss: -57.5065, Generator Loss: 148.4088 Batch 570/782, Critic Loss: -162.9228, Generator Loss: 311.9352 Batch 571/782, Critic Loss: -199.1941, Generator Loss: 12.3183 Batch 572/782, Critic Loss: -203.0326, Generator Loss: 318.0752 Batch 573/782, Critic Loss: -303.7727, Generator Loss: 37.9128 Batch 574/782, Critic Loss: -169.7743, Generator Loss: 230.4371 Batch 575/782, Critic Loss: -251.5645, Generator Loss: 308.6215 Batch 576/782, Critic Loss: -302.2941, Generator Loss: 127.7717 Batch 577/782, Critic Loss: -222.7786, Generator Loss: 311.4590 Batch 578/782, Critic Loss: -483.7784, Generator Loss: 306.2142 Batch 579/782, Critic Loss: -353.5865, Generator Loss: 100.5162 Batch 580/782, Critic Loss: -152.6309, Generator Loss: 279.2026 Batch 581/782, Critic Loss: -341.2908, Generator Loss: 323.5013 Batch 582/782, Critic Loss: -370.8999, Generator Loss: 211.8738 Batch 583/782, Critic Loss: -439.1102, Generator Loss: 289.4450 Batch 584/782, Critic Loss: -518.1190, Generator Loss: 341.1893 Batch 585/782, Critic Loss: -568.8104, Generator Loss: 229.9999 Batch 586/782, Critic Loss: -429.3481, Generator Loss: 360.0740 Batch 587/782, Critic Loss: -199.7315, Generator Loss: 355.3146 Batch 588/782, Critic Loss: -399.0558, Generator Loss: -177.3303 Batch 589/782, Critic Loss: -88.2275, Generator Loss: -288.5413 Batch 590/782, Critic Loss: 29.7560, Generator Loss: -296.7675 Batch 591/782, Critic Loss: -26.5687, Generator Loss: -307.3267 Batch 592/782, Critic Loss: -34.5596, Generator Loss: -291.6630 Batch 593/782, Critic Loss: 25.8048, Generator Loss: -285.6765 Batch 594/782, Critic Loss: -12.1727, Generator Loss: -275.0718 Batch 595/782, Critic Loss: -6.8003, Generator Loss: -238.3566 Batch 596/782, Critic Loss: -56.2004, Generator Loss: -202.7415 Batch 597/782, Critic Loss: -93.6886, Generator Loss: -84.3894 Batch 598/782, Critic Loss: -136.5895, Generator Loss: 100.6919 Batch 599/782, Critic Loss: -193.6544, Generator Loss: 259.5424 Batch 600/782, Critic Loss: -257.4687, Generator Loss: 128.0413 Batch 601/782, Critic Loss: -235.2225, Generator Loss: 309.2726 Batch 602/782, Critic Loss: -189.3096, Generator Loss: 174.6016 Batch 603/782, Critic Loss: -234.7519, Generator Loss: 260.7307 Batch 604/782, Critic Loss: -279.2338, Generator Loss: 247.9323 Batch 605/782, Critic Loss: -267.4395, Generator Loss: 287.5172 Batch 606/782, Critic Loss: -255.4232, Generator Loss: 120.7474 Batch 607/782, Critic Loss: -263.3874, Generator Loss: 326.3015 Batch 608/782, Critic Loss: -261.0350, Generator Loss: 206.8897 Batch 609/782, Critic Loss: -276.8687, Generator Loss: 51.0764 Batch 610/782, Critic Loss: -271.1900, Generator Loss: 349.2186 Batch 611/782, Critic Loss: -233.5402, Generator Loss: 255.8846 Batch 612/782, Critic Loss: -297.0719, Generator Loss: 128.8489 Batch 613/782, Critic Loss: -308.5014, Generator Loss: 359.4424 Batch 614/782, Critic Loss: -227.4591, Generator Loss: 270.8644 Batch 615/782, Critic Loss: -446.3727, Generator Loss: 193.6693 Batch 616/782, Critic Loss: -428.1392, Generator Loss: 350.4498 Batch 617/782, Critic Loss: -317.2932, Generator Loss: 130.1484 Batch 618/782, Critic Loss: -378.9901, Generator Loss: 366.2343 Batch 619/782, Critic Loss: -273.2643, Generator Loss: 167.3271 Batch 620/782, Critic Loss: -390.9294, Generator Loss: 349.4191 Batch 621/782, Critic Loss: -378.1579, Generator Loss: 68.6894 Batch 622/782, Critic Loss: -347.2136, Generator Loss: 376.5245 Batch 623/782, Critic Loss: -291.6792, Generator Loss: 281.1034 Batch 624/782, Critic Loss: -462.3681, Generator Loss: 96.9528 Batch 625/782, Critic Loss: -304.7383, Generator Loss: 362.1144 Batch 626/782, Critic Loss: -357.6931, Generator Loss: 186.0708 Batch 627/782, Critic Loss: -415.3375, Generator Loss: 361.5190 Batch 628/782, Critic Loss: -381.2480, Generator Loss: 146.3914 Batch 629/782, Critic Loss: -327.5515, Generator Loss: 335.5549 Batch 630/782, Critic Loss: -527.2192, Generator Loss: 238.3741 Batch 631/782, Critic Loss: -397.4857, Generator Loss: 401.5842 Batch 632/782, Critic Loss: -288.8766, Generator Loss: 20.4417 Batch 633/782, Critic Loss: -250.3909, Generator Loss: 396.1044 Batch 634/782, Critic Loss: -182.2244, Generator Loss: 326.7162 Batch 635/782, Critic Loss: -313.2044, Generator Loss: 36.2860 Batch 636/782, Critic Loss: -210.4186, Generator Loss: 301.0331 Batch 637/782, Critic Loss: -327.3104, Generator Loss: 353.3208 Batch 638/782, Critic Loss: -452.2879, Generator Loss: -12.7550 Batch 639/782, Critic Loss: -149.3076, Generator Loss: -11.3952 Batch 640/782, Critic Loss: -140.9952, Generator Loss: 326.3068 Batch 641/782, Critic Loss: -346.7720, Generator Loss: 256.6133 Batch 642/782, Critic Loss: -486.7770, Generator Loss: 396.5078 Batch 643/782, Critic Loss: -329.1220, Generator Loss: -63.4647 Batch 644/782, Critic Loss: -165.7805, Generator Loss: 67.4798 Batch 645/782, Critic Loss: -281.2360, Generator Loss: 367.1125 Batch 646/782, Critic Loss: -406.6754, Generator Loss: 161.5172 Batch 647/782, Critic Loss: -390.8468, Generator Loss: 398.4094 Batch 648/782, Critic Loss: -446.9073, Generator Loss: 106.2826 Batch 649/782, Critic Loss: -405.9319, Generator Loss: 390.8099 Batch 650/782, Critic Loss: -507.7945, Generator Loss: 215.2380 Batch 651/782, Critic Loss: -501.3671, Generator Loss: 393.0179 Batch 652/782, Critic Loss: -516.5538, Generator Loss: 57.8534 Batch 653/782, Critic Loss: -333.5585, Generator Loss: 417.4321 Batch 654/782, Critic Loss: -391.4752, Generator Loss: 207.1366 Batch 655/782, Critic Loss: -504.1879, Generator Loss: 408.7170 Batch 656/782, Critic Loss: -500.6402, Generator Loss: 34.7007 Batch 657/782, Critic Loss: -290.7892, Generator Loss: 367.7416 Batch 658/782, Critic Loss: -525.3391, Generator Loss: 335.6297 Batch 659/782, Critic Loss: -449.9044, Generator Loss: 247.9709 Batch 660/782, Critic Loss: -519.1481, Generator Loss: 421.2654 Batch 661/782, Critic Loss: -494.1949, Generator Loss: 21.3283 Batch 662/782, Critic Loss: -194.1129, Generator Loss: 220.1356 Batch 663/782, Critic Loss: -439.5913, Generator Loss: 434.6806 Batch 664/782, Critic Loss: -225.0137, Generator Loss: 398.8702 Batch 665/782, Critic Loss: -568.8575, Generator Loss: -11.3181 Batch 666/782, Critic Loss: -59.0165, Generator Loss: -353.1645 Batch 667/782, Critic Loss: -45.7826, Generator Loss: -288.3878 Batch 668/782, Critic Loss: -115.6968, Generator Loss: -137.8723 Batch 669/782, Critic Loss: -150.3601, Generator Loss: 23.2491 Batch 670/782, Critic Loss: -167.7450, Generator Loss: 228.6670 Batch 671/782, Critic Loss: -285.8621, Generator Loss: 353.1788 Batch 672/782, Critic Loss: -354.6093, Generator Loss: 152.9482 Batch 673/782, Critic Loss: -205.8700, Generator Loss: 441.3852 Batch 674/782, Critic Loss: -204.0996, Generator Loss: 410.0168 Batch 675/782, Critic Loss: -314.5738, Generator Loss: -126.2492 Batch 676/782, Critic Loss: -146.5204, Generator Loss: 264.9408 Batch 677/782, Critic Loss: -414.0363, Generator Loss: 417.8563 Batch 678/782, Critic Loss: -503.1671, Generator Loss: 141.7709 Batch 679/782, Critic Loss: -415.0809, Generator Loss: 440.0574 Batch 680/782, Critic Loss: -466.3348, Generator Loss: 198.7724 Batch 681/782, Critic Loss: -493.6382, Generator Loss: 443.0955 Batch 682/782, Critic Loss: -510.3361, Generator Loss: 241.7133 Batch 683/782, Critic Loss: -392.3703, Generator Loss: 414.9597 Batch 684/782, Critic Loss: -400.3218, Generator Loss: 309.7515 Batch 685/782, Critic Loss: -428.2786, Generator Loss: 375.1231 Batch 686/782, Critic Loss: -624.4711, Generator Loss: 411.7258 Batch 687/782, Critic Loss: -634.1790, Generator Loss: 391.0437 Batch 688/782, Critic Loss: -420.6858, Generator Loss: 69.9346 Batch 689/782, Critic Loss: -245.4965, Generator Loss: 376.5449 Batch 690/782, Critic Loss: -412.1694, Generator Loss: 414.3110 Batch 691/782, Critic Loss: -705.9817, Generator Loss: 318.6769 Batch 692/782, Critic Loss: -354.6000, Generator Loss: 387.9164 Batch 693/782, Critic Loss: -551.6429, Generator Loss: 435.3957 Batch 694/782, Critic Loss: -454.6155, Generator Loss: -164.8851 Batch 695/782, Critic Loss: -103.8219, Generator Loss: -333.9753 Batch 696/782, Critic Loss: -81.9064, Generator Loss: -278.7136 Batch 697/782, Critic Loss: -102.4592, Generator Loss: -165.0904 Batch 698/782, Critic Loss: -167.0392, Generator Loss: -31.9977 Batch 699/782, Critic Loss: -201.9614, Generator Loss: 97.2213 Batch 700/782, Critic Loss: -277.4456, Generator Loss: 335.3699 Batch 701/782, Critic Loss: -421.9639, Generator Loss: 142.4919 Batch 702/782, Critic Loss: -385.2822, Generator Loss: 465.8632 Batch 703/782, Critic Loss: -220.2740, Generator Loss: 431.7216 Batch 704/782, Critic Loss: -430.5855, Generator Loss: -68.9444 Batch 705/782, Critic Loss: -263.3655, Generator Loss: 159.3674 Batch 706/782, Critic Loss: -302.6891, Generator Loss: 418.5106 Batch 707/782, Critic Loss: -170.4150, Generator Loss: 372.0250 Batch 708/782, Critic Loss: -411.6483, Generator Loss: 34.0121 Batch 709/782, Critic Loss: -285.2204, Generator Loss: 292.7722 Batch 710/782, Critic Loss: -371.6585, Generator Loss: 351.4934 Batch 711/782, Critic Loss: -510.7016, Generator Loss: 198.3286 Batch 712/782, Critic Loss: -460.5036, Generator Loss: 450.2161 Batch 713/782, Critic Loss: -420.6142, Generator Loss: -43.0285 Batch 714/782, Critic Loss: -331.2954, Generator Loss: 344.8205 Batch 715/782, Critic Loss: -675.0916, Generator Loss: 430.8014 Batch 716/782, Critic Loss: -737.7648, Generator Loss: 389.5511 Batch 717/782, Critic Loss: -609.1305, Generator Loss: 484.4132 Batch 718/782, Critic Loss: -152.4688, Generator Loss: 477.1908 Batch 719/782, Critic Loss: -159.8132, Generator Loss: 341.7887 Batch 720/782, Critic Loss: -246.4500, Generator Loss: 209.1273 Batch 721/782, Critic Loss: -456.7545, Generator Loss: 412.4949 Batch 722/782, Critic Loss: -629.5558, Generator Loss: 398.3185 Batch 723/782, Critic Loss: -440.2039, Generator Loss: 477.8526 Batch 724/782, Critic Loss: -299.8053, Generator Loss: 376.2532 Batch 725/782, Critic Loss: -354.0023, Generator Loss: 196.0269 Batch 726/782, Critic Loss: -317.2856, Generator Loss: 363.3900 Batch 727/782, Critic Loss: -487.9143, Generator Loss: 380.1059 Batch 728/782, Critic Loss: -480.4758, Generator Loss: 495.2392 Batch 729/782, Critic Loss: -315.8978, Generator Loss: 445.5186 Batch 730/782, Critic Loss: -481.8911, Generator Loss: 234.2188 Batch 731/782, Critic Loss: -394.8758, Generator Loss: 380.3239 Batch 732/782, Critic Loss: -479.4572, Generator Loss: 388.4437 Batch 733/782, Critic Loss: -756.5653, Generator Loss: 463.3473 Batch 734/782, Critic Loss: -607.9138, Generator Loss: 402.0373 Batch 735/782, Critic Loss: -475.4044, Generator Loss: 506.4267 Batch 736/782, Critic Loss: -164.9573, Generator Loss: 489.5243 Batch 737/782, Critic Loss: -167.4159, Generator Loss: 423.4815 Batch 738/782, Critic Loss: -282.5647, Generator Loss: 298.6483 Batch 739/782, Critic Loss: -340.1844, Generator Loss: 302.6118 Batch 740/782, Critic Loss: -527.4645, Generator Loss: 334.7769 Batch 741/782, Critic Loss: -575.3890, Generator Loss: 419.3829 Batch 742/782, Critic Loss: -638.2266, Generator Loss: 201.4001 Batch 743/782, Critic Loss: -525.7450, Generator Loss: 511.5751 Batch 744/782, Critic Loss: -235.6909, Generator Loss: 448.0039 Batch 745/782, Critic Loss: -506.6289, Generator Loss: 4.6967 Batch 746/782, Critic Loss: -406.6177, Generator Loss: 466.0666 Batch 747/782, Critic Loss: -604.2352, Generator Loss: 405.0086 Batch 748/782, Critic Loss: -743.1827, Generator Loss: 502.2586 Batch 749/782, Critic Loss: -757.7863, Generator Loss: 378.9200 Batch 750/782, Critic Loss: -501.2435, Generator Loss: 525.8450 Batch 751/782, Critic Loss: -315.9394, Generator Loss: 500.7301 Batch 752/782, Critic Loss: -641.0418, Generator Loss: 96.6448 Batch 753/782, Critic Loss: -162.1774, Generator Loss: -316.3743 Batch 754/782, Critic Loss: -151.2359, Generator Loss: 131.2783 Batch 755/782, Critic Loss: -401.8209, Generator Loss: 506.2430 Batch 756/782, Critic Loss: -297.6172, Generator Loss: 249.4408 Batch 757/782, Critic Loss: -392.0189, Generator Loss: 465.0865 Batch 758/782, Critic Loss: -370.3572, Generator Loss: 219.5402 Batch 759/782, Critic Loss: -379.1008, Generator Loss: 360.0520 Batch 760/782, Critic Loss: -612.0156, Generator Loss: 420.4080 Batch 761/782, Critic Loss: -642.0502, Generator Loss: 481.3409 Batch 762/782, Critic Loss: -442.2708, Generator Loss: -252.0464 Batch 763/782, Critic Loss: -212.3629, Generator Loss: -128.4526 Batch 764/782, Critic Loss: -337.3435, Generator Loss: 464.9253 Batch 765/782, Critic Loss: -414.6816, Generator Loss: 423.4415 Batch 766/782, Critic Loss: -636.1899, Generator Loss: 279.6651 Batch 767/782, Critic Loss: -607.9102, Generator Loss: 466.2605 Batch 768/782, Critic Loss: -743.0511, Generator Loss: -3.9064 Batch 769/782, Critic Loss: -335.7972, Generator Loss: 380.7330 Batch 770/782, Critic Loss: -662.4666, Generator Loss: 521.8980 Batch 771/782, Critic Loss: -770.5820, Generator Loss: -239.5848 Batch 772/782, Critic Loss: -233.9075, Generator Loss: 92.9947 Batch 773/782, Critic Loss: -464.2297, Generator Loss: 488.4202 Batch 774/782, Critic Loss: -347.3123, Generator Loss: 270.7634 Batch 775/782, Critic Loss: -724.6901, Generator Loss: 544.1710 Batch 776/782, Critic Loss: -304.1259, Generator Loss: 404.6740 Batch 777/782, Critic Loss: -697.9916, Generator Loss: 459.4776 Batch 778/782, Critic Loss: -826.0715, Generator Loss: 524.2986 Batch 779/782, Critic Loss: -801.9186, Generator Loss: 351.9839 Batch 780/782, Critic Loss: -759.6152, Generator Loss: 561.1922 Batch 781/782, Critic Loss: -445.0027, Generator Loss: 317.1749 Batch 782/782, Critic Loss: 4702.3433, Generator Loss: 418.5036
Epoch 2/10 start Batch 1/782, Critic Loss: 856.2699, Generator Loss: 371.7342 Batch 2/782, Critic Loss: 678.8665, Generator Loss: 354.6173 Batch 3/782, Critic Loss: 634.7745, Generator Loss: 335.0392 Batch 4/782, Critic Loss: 393.9493, Generator Loss: 326.3464 Batch 5/782, Critic Loss: 892.2678, Generator Loss: 310.8895 Batch 6/782, Critic Loss: 361.9343, Generator Loss: 309.8482 Batch 7/782, Critic Loss: 476.8417, Generator Loss: 317.1620 Batch 8/782, Critic Loss: 567.8340, Generator Loss: 288.5782 Batch 9/782, Critic Loss: 418.9858, Generator Loss: 311.5741 Batch 10/782, Critic Loss: 190.9125, Generator Loss: 308.2197 Batch 11/782, Critic Loss: 214.6934, Generator Loss: 296.1552 Batch 12/782, Critic Loss: 210.7397, Generator Loss: 284.1454 Batch 13/782, Critic Loss: 152.5405, Generator Loss: 291.8964 Batch 14/782, Critic Loss: 427.1361, Generator Loss: 262.2784 Batch 15/782, Critic Loss: 214.1109, Generator Loss: 275.6182 Batch 16/782, Critic Loss: 188.3472, Generator Loss: 275.1928 Batch 17/782, Critic Loss: 252.6185, Generator Loss: 279.8694 Batch 18/782, Critic Loss: 209.2754, Generator Loss: 269.0540 Batch 19/782, Critic Loss: 178.6270, Generator Loss: 254.9166 Batch 20/782, Critic Loss: 219.2594, Generator Loss: 271.3539 Batch 21/782, Critic Loss: 136.0263, Generator Loss: 244.6803 Batch 22/782, Critic Loss: 225.2763, Generator Loss: 258.6506 Batch 23/782, Critic Loss: 232.0623, Generator Loss: 256.4706 Batch 24/782, Critic Loss: 132.9249, Generator Loss: 253.8543 Batch 25/782, Critic Loss: 242.7081, Generator Loss: 258.9343 Batch 26/782, Critic Loss: 125.1868, Generator Loss: 257.8552 Batch 27/782, Critic Loss: 173.1548, Generator Loss: 257.0139 Batch 28/782, Critic Loss: 156.1471, Generator Loss: 251.0185 Batch 29/782, Critic Loss: 226.2848, Generator Loss: 245.8295 Batch 30/782, Critic Loss: 70.9463, Generator Loss: 242.0736 Batch 31/782, Critic Loss: 152.2658, Generator Loss: 231.5615 Batch 32/782, Critic Loss: 85.5024, Generator Loss: 245.6931 Batch 33/782, Critic Loss: 283.3953, Generator Loss: 245.0544 Batch 34/782, Critic Loss: 215.3740, Generator Loss: 237.2411 Batch 35/782, Critic Loss: 149.6818, Generator Loss: 223.1476 Batch 36/782, Critic Loss: 142.3883, Generator Loss: 224.8282 Batch 37/782, Critic Loss: 177.2194, Generator Loss: 230.9789 Batch 38/782, Critic Loss: 192.3277, Generator Loss: 219.7854 Batch 39/782, Critic Loss: 83.8313, Generator Loss: 229.6725 Batch 40/782, Critic Loss: 60.2322, Generator Loss: 224.4273 Batch 41/782, Critic Loss: 97.1359, Generator Loss: 224.1789 Batch 42/782, Critic Loss: 79.4922, Generator Loss: 209.7657 Batch 43/782, Critic Loss: 164.9084, Generator Loss: 219.7317 Batch 44/782, Critic Loss: 130.2692, Generator Loss: 209.2009 Batch 45/782, Critic Loss: 87.6961, Generator Loss: 218.3866 Batch 46/782, Critic Loss: 113.8908, Generator Loss: 210.5611 Batch 47/782, Critic Loss: 52.7234, Generator Loss: 223.2947 Batch 48/782, Critic Loss: 93.0062, Generator Loss: 207.7066 Batch 49/782, Critic Loss: 111.2784, Generator Loss: 210.0276 Batch 50/782, Critic Loss: 77.6499, Generator Loss: 205.0943 Batch 51/782, Critic Loss: 80.4493, Generator Loss: 202.8807 Batch 52/782, Critic Loss: 86.9463, Generator Loss: 202.8333 Batch 53/782, Critic Loss: 126.3082, Generator Loss: 213.0143 Batch 54/782, Critic Loss: 138.6314, Generator Loss: 208.6637 Batch 55/782, Critic Loss: 65.9923, Generator Loss: 204.7556 Batch 56/782, Critic Loss: 99.1721, Generator Loss: 203.6645 Batch 57/782, Critic Loss: 110.2915, Generator Loss: 211.2883 Batch 58/782, Critic Loss: 105.5216, Generator Loss: 206.2151 Batch 59/782, Critic Loss: 175.1206, Generator Loss: 192.6351 Batch 60/782, Critic Loss: 52.2169, Generator Loss: 203.4081 Batch 61/782, Critic Loss: 76.7402, Generator Loss: 199.2562 Batch 62/782, Critic Loss: 92.9563, Generator Loss: 194.0462 Batch 63/782, Critic Loss: 99.8034, Generator Loss: 199.7582 Batch 64/782, Critic Loss: 53.3104, Generator Loss: 196.2402 Batch 65/782, Critic Loss: 52.5019, Generator Loss: 190.3297 Batch 66/782, Critic Loss: 71.9295, Generator Loss: 186.0076 Batch 67/782, Critic Loss: 80.5172, Generator Loss: 202.1871 Batch 68/782, Critic Loss: 88.5571, Generator Loss: 189.1081 Batch 69/782, Critic Loss: 153.2382, Generator Loss: 190.3137 Batch 70/782, Critic Loss: 116.8902, Generator Loss: 183.8620 Batch 71/782, Critic Loss: 95.5711, Generator Loss: 184.6648 Batch 72/782, Critic Loss: 56.3302, Generator Loss: 179.7870 Batch 73/782, Critic Loss: 63.1300, Generator Loss: 181.5319 Batch 74/782, Critic Loss: 77.6721, Generator Loss: 184.4281 Batch 75/782, Critic Loss: 76.0533, Generator Loss: 188.5882 Batch 76/782, Critic Loss: 50.2478, Generator Loss: 188.7473 Batch 77/782, Critic Loss: 76.1629, Generator Loss: 189.5045 Batch 78/782, Critic Loss: 68.6460, Generator Loss: 188.7686 Batch 79/782, Critic Loss: 112.2881, Generator Loss: 180.3285 Batch 80/782, Critic Loss: 62.9889, Generator Loss: 183.5004 Batch 81/782, Critic Loss: 60.6816, Generator Loss: 189.8785 Batch 82/782, Critic Loss: 56.3421, Generator Loss: 179.3059 Batch 83/782, Critic Loss: 80.8708, Generator Loss: 179.7146 Batch 84/782, Critic Loss: 163.5967, Generator Loss: 178.1660 Batch 85/782, Critic Loss: 45.1970, Generator Loss: 177.4208 Batch 86/782, Critic Loss: 80.1816, Generator Loss: 171.1874 Batch 87/782, Critic Loss: 49.4781, Generator Loss: 173.3868 Batch 88/782, Critic Loss: 44.6258, Generator Loss: 174.3520 Batch 89/782, Critic Loss: 37.2472, Generator Loss: 173.7980 Batch 90/782, Critic Loss: 73.7258, Generator Loss: 166.4994 Batch 91/782, Critic Loss: 47.4451, Generator Loss: 173.0340 Batch 92/782, Critic Loss: 32.6604, Generator Loss: 176.3387 Batch 93/782, Critic Loss: 36.2910, Generator Loss: 176.1758 Batch 94/782, Critic Loss: 47.5568, Generator Loss: 173.3804 Batch 95/782, Critic Loss: 55.9674, Generator Loss: 173.6207 Batch 96/782, Critic Loss: 66.0966, Generator Loss: 170.4222 Batch 97/782, Critic Loss: 38.4436, Generator Loss: 166.9523 Batch 98/782, Critic Loss: 81.0600, Generator Loss: 172.8554 Batch 99/782, Critic Loss: 45.9882, Generator Loss: 174.1221 Batch 100/782, Critic Loss: 56.4111, Generator Loss: 173.1816 Batch 101/782, Critic Loss: 40.5247, Generator Loss: 170.4456 Batch 102/782, Critic Loss: 53.3585, Generator Loss: 168.0742 Batch 103/782, Critic Loss: 30.5674, Generator Loss: 177.0172 Batch 104/782, Critic Loss: 37.9895, Generator Loss: 169.3008 Batch 105/782, Critic Loss: 47.3665, Generator Loss: 168.5904 Batch 106/782, Critic Loss: 76.4425, Generator Loss: 162.8402 Batch 107/782, Critic Loss: 38.6389, Generator Loss: 164.0242 Batch 108/782, Critic Loss: 40.7697, Generator Loss: 170.6839 Batch 109/782, Critic Loss: 45.6705, Generator Loss: 163.1099 Batch 110/782, Critic Loss: 40.2951, Generator Loss: 165.6651 Batch 111/782, Critic Loss: 47.6652, Generator Loss: 169.0233 Batch 112/782, Critic Loss: 32.8445, Generator Loss: 170.1386 Batch 113/782, Critic Loss: 47.2182, Generator Loss: 168.5443 Batch 114/782, Critic Loss: 38.8050, Generator Loss: 172.9630 Batch 115/782, Critic Loss: 48.5815, Generator Loss: 164.6923 Batch 116/782, Critic Loss: 112.3627, Generator Loss: 166.0912 Batch 117/782, Critic Loss: 50.4958, Generator Loss: 167.3158 Batch 118/782, Critic Loss: 36.8789, Generator Loss: 167.1081 Batch 119/782, Critic Loss: 41.5416, Generator Loss: 166.2266 Batch 120/782, Critic Loss: 50.6655, Generator Loss: 167.7398 Batch 121/782, Critic Loss: 63.9330, Generator Loss: 162.2113 Batch 122/782, Critic Loss: 107.1698, Generator Loss: 159.7107 Batch 123/782, Critic Loss: 65.5050, Generator Loss: 161.1564 Batch 124/782, Critic Loss: 26.0090, Generator Loss: 162.3306 Batch 125/782, Critic Loss: 54.7368, Generator Loss: 158.0382 Batch 126/782, Critic Loss: 54.0857, Generator Loss: 159.3860 Batch 127/782, Critic Loss: 24.5695, Generator Loss: 159.7789 Batch 128/782, Critic Loss: 66.8945, Generator Loss: 155.7723 Batch 129/782, Critic Loss: 55.2119, Generator Loss: 161.1770 Batch 130/782, Critic Loss: 30.4125, Generator Loss: 165.0516 Batch 131/782, Critic Loss: 46.8250, Generator Loss: 155.6942 Batch 132/782, Critic Loss: 50.9567, Generator Loss: 159.1183 Batch 133/782, Critic Loss: 32.3818, Generator Loss: 156.5087 Batch 134/782, Critic Loss: 74.3267, Generator Loss: 160.4466 Batch 135/782, Critic Loss: 33.3671, Generator Loss: 159.0713 Batch 136/782, Critic Loss: 18.9812, Generator Loss: 156.3442 Batch 137/782, Critic Loss: 26.7077, Generator Loss: 159.6648 Batch 138/782, Critic Loss: 32.2172, Generator Loss: 157.5047 Batch 139/782, Critic Loss: 16.3342, Generator Loss: 158.9164 Batch 140/782, Critic Loss: 29.5997, Generator Loss: 157.4995 Batch 141/782, Critic Loss: 50.3941, Generator Loss: 157.4523 Batch 142/782, Critic Loss: 18.5171, Generator Loss: 161.7506 Batch 143/782, Critic Loss: 44.3877, Generator Loss: 159.5203 Batch 144/782, Critic Loss: 23.9358, Generator Loss: 159.4289 Batch 145/782, Critic Loss: 33.6636, Generator Loss: 156.8857 Batch 146/782, Critic Loss: 42.5051, Generator Loss: 157.7996 Batch 147/782, Critic Loss: 34.0225, Generator Loss: 159.7003 Batch 148/782, Critic Loss: 30.7282, Generator Loss: 162.2082 Batch 149/782, Critic Loss: 35.0272, Generator Loss: 155.6708 Batch 150/782, Critic Loss: 49.1913, Generator Loss: 158.0500 Batch 151/782, Critic Loss: 21.1533, Generator Loss: 159.6813 Batch 152/782, Critic Loss: 21.4831, Generator Loss: 158.1164 Batch 153/782, Critic Loss: 23.4126, Generator Loss: 159.9972 Batch 154/782, Critic Loss: 38.2545, Generator Loss: 155.8857 Batch 155/782, Critic Loss: 23.0146, Generator Loss: 163.2430 Batch 156/782, Critic Loss: 32.6529, Generator Loss: 160.9016 Batch 157/782, Critic Loss: 33.0038, Generator Loss: 159.3133 Batch 158/782, Critic Loss: 15.0438, Generator Loss: 157.4112 Batch 159/782, Critic Loss: 22.6127, Generator Loss: 160.0040 Batch 160/782, Critic Loss: 21.0363, Generator Loss: 160.1029 Batch 161/782, Critic Loss: 25.9497, Generator Loss: 158.0474 Batch 162/782, Critic Loss: 16.5111, Generator Loss: 161.7287 Batch 163/782, Critic Loss: 27.2675, Generator Loss: 161.1240 Batch 164/782, Critic Loss: 31.6294, Generator Loss: 161.4682 Batch 165/782, Critic Loss: 13.7004, Generator Loss: 157.1373 Batch 166/782, Critic Loss: 20.2738, Generator Loss: 158.8464 Batch 167/782, Critic Loss: 44.1535, Generator Loss: 158.0551 Batch 168/782, Critic Loss: 16.3362, Generator Loss: 158.1682 Batch 169/782, Critic Loss: 6.5456, Generator Loss: 161.8379 Batch 170/782, Critic Loss: 53.9895, Generator Loss: 159.4901 Batch 171/782, Critic Loss: 22.3133, Generator Loss: 160.4373 Batch 172/782, Critic Loss: 16.0287, Generator Loss: 160.5514 Batch 173/782, Critic Loss: 22.0337, Generator Loss: 160.2585 Batch 174/782, Critic Loss: 11.4437, Generator Loss: 155.8236 Batch 175/782, Critic Loss: 10.4863, Generator Loss: 161.7590 Batch 176/782, Critic Loss: 11.6102, Generator Loss: 161.2619
In [ ]:
# #Save the trained models and optimizers
# torch.save({
# 'epoch': epoch,
# 'generator_state_dict': generator.state_dict(),
# 'critic_state_dict': critic.state_dict(),
# 'optimizer_G_state_dict': optimizer_G.state_dict(),
# 'optimizer_C_state_dict': optimizer_C.state_dict(),
# 'losses': (gen_losses, critic_losses)
# }, 'wgan_model_v1.pth')
Run 3- tweaking Hyperparameters(Successfull)¶
In [4]:
import torch
import torch.nn as nn
import torch.optim as optim
import torchvision
from torch.utils.data import Dataset, DataLoader
from PIL import Image, UnidentifiedImageError
import os
import matplotlib.pyplot as plt
import time
from torchvision import transforms
# Check if CUDA is available
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Custom Dataset for Yelp Photos
class YelpPhotoDataset(Dataset):
def __init__(self, photos_folder, transform=None, max_samples=None):
self.photos_folder = photos_folder
self.transform = transform
self.image_files = [f for f in os.listdir(photos_folder) if f.endswith('.jpg')] # Only jpg images
if max_samples:
self.image_files = self.image_files[:max_samples] # Limit number of samples
def __getitem__(self, idx):
img_path = os.path.join(self.photos_folder, self.image_files[idx])
try:
image = Image.open(img_path).convert('RGB') # Open image
if self.transform:
image = self.transform(image) # Apply the transformation pipeline
return image
except (UnidentifiedImageError, IOError) as e:
# Catch errors related to invalid or unreadable images
print(f"Error loading image {img_path}: {e}")
return None # Return None if there's an error loading the image
def __len__(self):
return len(self.image_files)
# Custom collate function to filter out None values from batches
def collate_fn(batch):
batch = [item for item in batch if item is not None] # Remove None values
return torch.stack(batch, 0) if len(batch) > 0 else None # Only stack if there's valid data
class Generator(nn.Module):
def __init__(self, latent_dim=128):
super(Generator, self).__init__()
self.init_size = 64 // 4 # Output size after upscaling
self.fc = nn.Sequential(
nn.Linear(latent_dim, 128 * self.init_size ** 2), # Input size now matches latent_dim
nn.BatchNorm1d(128 * self.init_size ** 2),
nn.LeakyReLU(0.2, inplace=True),
)
self.conv_blocks = nn.Sequential(
nn.Upsample(scale_factor=2), # 16x16 -> 32x32
nn.Conv2d(128, 128, 3, stride=1, padding=1),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.2, inplace=True),
nn.Upsample(scale_factor=2), # 32x32 -> 64x64
nn.Conv2d(128, 64, 3, stride=1, padding=1),
nn.BatchNorm2d(64),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(64, 3, 3, stride=1, padding=1),
nn.Tanh(), # Output values in range [-1, 1]
)
def forward(self, z):
out = self.fc(z)
out = out.view(out.size(0), 128, self.init_size, self.init_size)
img = self.conv_blocks(out)
return img
# Define Critic Model
class Critic(nn.Module):
def __init__(self, img_channels, img_size):
super(Critic, self).__init__()
conv1_output = (img_size - 3 + 2*1) // 2 + 1
conv2_output = (conv1_output - 3 + 2*1) // 2 + 1
conv3_output = (conv2_output - 3 + 2*1) // 2 + 1
conv4_output = (conv3_output - 3 + 2*1) // 2 + 1
flattened_size = conv4_output * conv4_output * 512 # 512 channels after the last Conv layer
self.model = nn.Sequential(
nn.Conv2d(img_channels, 64, kernel_size=3, stride=2, padding=1),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(128, 256, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(256, 512, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.2, inplace=True)
)
self.fc = nn.Linear(flattened_size, 1)
def forward(self, img):
x = self.model(img)
x = x.view(x.size(0), -1)
output = self.fc(x)
return output
# Gradient Penalty Calculation for WGAN-GP
def compute_gradient_penalty(critic, real_samples, fake_samples):
alpha = torch.rand(real_samples.size(0), 1, 1, 1, device=real_samples.device)
interpolates = (alpha * real_samples + (1 - alpha) * fake_samples).requires_grad_(True)
d_interpolates = critic(interpolates)
fake = torch.ones(d_interpolates.size(), device=real_samples.device)
gradients = torch.autograd.grad(
outputs=d_interpolates,
inputs=interpolates,
grad_outputs=fake,
create_graph=True,
retain_graph=True,
only_inputs=True
)[0]
gradients = gradients.view(gradients.size(0), -1)
gradient_penalty = ((gradients.norm(2, dim=1) - 1) ** 2).mean()
return gradient_penalty
# Save generated images for visualization
def save_generated_images(epoch, fixed_z, display=False):
generator.eval()
with torch.no_grad():
samples = generator(fixed_z).cpu()
grid = torchvision.utils.make_grid(samples, normalize=True, scale_each=True)
if display:
# Display image
plt.imshow(grid.permute(1, 2, 0))
plt.title(f"Epoch {epoch + 1}")
plt.show()
else:
# Save image
plt.imshow(grid.permute(1, 2, 0))
plt.title(f"Epoch {epoch + 1}")
plt.savefig(f"generated_images_epoch_{epoch + 1}.png")
plt.close()
generator.train()
def save_models(generator, critic, epoch, path="wgan_models.pth"):
torch.save({
'generator': generator.state_dict(),
'critic': critic.state_dict(),
}, path)
print(f"Models saved at epoch {epoch} to {path}.")
# Hyperparameters
latent_dim = 128
img_size = 64
img_channels = 3
epochs = 50
batch_size = 64
critic_iters = 5
lambda_gp = 10
lr = 0.00005
# Image Transformation
transform = transforms.Compose([
transforms.Resize((64, 64)),
transforms.ToTensor(),
transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]) # Normalize to [-1, 1]
])
# Initialize models
generator = Generator().to(device)
critic = Critic(img_channels=img_channels, img_size=img_size).to(device)
# Optimizers
optimizer_G = optim.Adam(generator.parameters(), lr=0.0001, betas=(0, 0.9))
optimizer_C = optim.Adam(critic.parameters(), lr=0.00005, betas=(0, 0.9))
# Dataset and DataLoader
photos_folder = 'C:/Users/singh/Downloads/yelp_dataset/photos' # Update with correct path
dataset = YelpPhotoDataset(photos_folder, transform=transform, max_samples=500)
dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0, collate_fn=collate_fn)
# Fixed noise for consistent image generation
fixed_z = torch.randn(64, latent_dim, device=device)
# Loss tracking
gen_losses = []
critic_losses = []
# Training loop
for epoch in range(epochs):
epoch_gen_loss = 0
epoch_critic_loss = 0
for i, imgs in enumerate(dataloader):
if imgs is None:
continue
batch_size = imgs.size(0)
imgs = imgs.to(device)
# Train Critic
for _ in range(critic_iters):
optimizer_C.zero_grad()
z = torch.randn(batch_size, latent_dim, device=device)
fake_imgs = generator(z)
critic_real = critic(imgs)
critic_fake = critic(fake_imgs.detach())
gradient_penalty = compute_gradient_penalty(critic, imgs, fake_imgs)
critic_loss = critic_fake.mean() - critic_real.mean() + lambda_gp * gradient_penalty
critic_loss.backward()
torch.nn.utils.clip_grad_norm_(critic.parameters(), max_norm=10)
optimizer_C.step()
epoch_critic_loss += critic_loss.item()
# Train Generator
optimizer_G.zero_grad()
z = torch.randn(batch_size, latent_dim, device=device)
fake_imgs = generator(z)
critic_fake = critic(fake_imgs)
gen_loss = -critic_fake.mean()
gen_loss.backward()
optimizer_G.step()
epoch_gen_loss += gen_loss.item()
# Log per-epoch losses
avg_gen_loss = epoch_gen_loss / len(dataloader)
avg_critic_loss = epoch_critic_loss / (len(dataloader) * critic_iters)
gen_losses.append(avg_gen_loss)
critic_losses.append(avg_critic_loss)
print(f"Epoch {epoch + 1}/{epochs}, Generator Loss: {avg_gen_loss:.4f}, Critic Loss: {avg_critic_loss:.4f}")
# Save and display generated images every 5 epochs
if (epoch + 1) % 5 == 0:
save_generated_images(epoch, fixed_z, display=True)
# Save the trained models and optimizers
save_models(generator, critic, epoch, path="wgan_models-50.pth")
Epoch 1/50, Generator Loss: 2.2421, Critic Loss: -1.0323 Epoch 2/50, Generator Loss: 7.5080, Critic Loss: -2.6833 Epoch 3/50, Generator Loss: 11.6908, Critic Loss: -3.3584 Epoch 4/50, Generator Loss: 13.3678, Critic Loss: -3.3493 Epoch 5/50, Generator Loss: 14.8769, Critic Loss: -2.9609
Epoch 6/50, Generator Loss: 14.4274, Critic Loss: -2.5233 Epoch 7/50, Generator Loss: 15.6008, Critic Loss: -2.2348 Epoch 8/50, Generator Loss: 15.6365, Critic Loss: -2.0750 Epoch 9/50, Generator Loss: 16.2778, Critic Loss: -1.9921 Epoch 10/50, Generator Loss: 16.9997, Critic Loss: -1.9736
Epoch 11/50, Generator Loss: 17.5213, Critic Loss: -2.0227 Epoch 12/50, Generator Loss: 17.5647, Critic Loss: -2.1836 Epoch 13/50, Generator Loss: 18.6831, Critic Loss: -2.3265 Epoch 14/50, Generator Loss: 20.4897, Critic Loss: -2.3888 Epoch 15/50, Generator Loss: 20.7121, Critic Loss: -2.6261
Epoch 16/50, Generator Loss: 23.4313, Critic Loss: -2.8692 Epoch 17/50, Generator Loss: 23.7581, Critic Loss: -3.1121 Epoch 18/50, Generator Loss: 27.2208, Critic Loss: -3.0371 Epoch 19/50, Generator Loss: 25.4083, Critic Loss: -4.3627 Epoch 20/50, Generator Loss: 29.1518, Critic Loss: -4.0164
Epoch 21/50, Generator Loss: 29.7793, Critic Loss: -4.4505 Epoch 22/50, Generator Loss: 30.0490, Critic Loss: -7.5255 Epoch 23/50, Generator Loss: 37.3917, Critic Loss: -6.0221 Epoch 24/50, Generator Loss: 43.0258, Critic Loss: -4.7791 Epoch 25/50, Generator Loss: 37.0824, Critic Loss: -5.4723
Epoch 26/50, Generator Loss: 35.8876, Critic Loss: -11.0190 Epoch 27/50, Generator Loss: 44.3364, Critic Loss: -7.6434 Epoch 28/50, Generator Loss: 53.3717, Critic Loss: -6.1865 Epoch 29/50, Generator Loss: 38.8225, Critic Loss: -11.9425 Epoch 30/50, Generator Loss: 43.9926, Critic Loss: -11.2873
Epoch 31/50, Generator Loss: 62.8406, Critic Loss: -7.4079 Epoch 32/50, Generator Loss: 40.9425, Critic Loss: -13.0333 Epoch 33/50, Generator Loss: 53.1142, Critic Loss: -9.8996 Epoch 34/50, Generator Loss: 59.8886, Critic Loss: -9.9485 Epoch 35/50, Generator Loss: 63.8827, Critic Loss: -8.5586
Epoch 36/50, Generator Loss: 38.3863, Critic Loss: -9.5982 Epoch 37/50, Generator Loss: 55.6881, Critic Loss: -9.7411 Epoch 38/50, Generator Loss: 39.2529, Critic Loss: -13.7638 Epoch 39/50, Generator Loss: 42.6426, Critic Loss: -11.2769 Epoch 40/50, Generator Loss: 62.6985, Critic Loss: -14.2542
Epoch 41/50, Generator Loss: 65.9750, Critic Loss: -14.5022 Epoch 42/50, Generator Loss: 41.2353, Critic Loss: -13.9236 Epoch 43/50, Generator Loss: 47.6740, Critic Loss: -16.1097 Epoch 44/50, Generator Loss: 100.2226, Critic Loss: -13.4998 Epoch 45/50, Generator Loss: 45.5811, Critic Loss: -17.4495
Epoch 46/50, Generator Loss: 48.7908, Critic Loss: -15.6927 Epoch 47/50, Generator Loss: 107.2313, Critic Loss: -14.6886 Epoch 48/50, Generator Loss: 41.5771, Critic Loss: -15.7398 Epoch 49/50, Generator Loss: 105.6990, Critic Loss: -14.7015 Epoch 50/50, Generator Loss: 46.8123, Critic Loss: -16.2813
Models saved at epoch 49 to wgan_models-50.pth.
In [5]:
import torch
import numpy as np
from torchvision.models import inception_v3
from torch.nn.functional import softmax
from scipy.linalg import sqrtm
from torchvision import transforms
from tqdm import tqdm
# Inception Score (IS) calculation
def calculate_inception_score(generator, latent_dim, n_samples=5000, batch_size=64, device='cuda'):
generator.eval()
inception_model = inception_v3(pretrained=True, transform_input=False).to(device)
inception_model.eval()
transform = transforms.Compose([
transforms.Resize((299, 299)), # Inception requires 299x299
transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]),
])
preds = []
with torch.no_grad():
for _ in tqdm(range(n_samples // batch_size), desc="Calculating IS"):
z = torch.randn(batch_size, latent_dim, device=device)
generated_images = generator(z)
generated_images = (generated_images + 1) / 2 # Rescale to [0, 1]
transformed_images = torch.stack([transform(img) for img in generated_images])
logits = inception_model(transformed_images)
probs = softmax(logits, dim=1)
preds.append(probs.cpu().numpy())
preds = np.concatenate(preds, axis=0)
p_y = np.mean(preds, axis=0)
kl_divs = preds * (np.log(preds) - np.log(p_y[None, :]))
inception_score = np.exp(np.mean(np.sum(kl_divs, axis=1)))
return inception_score
# FID calculation
def calculate_fid(generator, dataloader, latent_dim, n_samples=5000, device='cuda'):
generator.eval()
inception_model = inception_v3(pretrained=True, transform_input=False).to(device)
inception_model.eval()
transform = transforms.Compose([
transforms.Resize((299, 299)), # Inception requires 299x299
])
def get_features(images):
images = (images + 1) / 2 # Rescale to [0, 1]
transformed_images = torch.stack([transform(img) for img in images])
features = inception_model(transformed_images)
return features.detach().cpu().numpy()
real_features = []
fake_features = []
# Collect real features
for real_imgs in tqdm(dataloader, desc="Collecting real features"):
real_imgs = real_imgs.to(device)
real_features.append(get_features(real_imgs))
if len(real_features) * batch_size >= n_samples:
break
# Collect fake features
with torch.no_grad():
for _ in tqdm(range(n_samples // batch_size), desc="Generating fake features"):
z = torch.randn(batch_size, latent_dim, device=device)
fake_imgs = generator(z)
fake_features.append(get_features(fake_imgs))
real_features = np.concatenate(real_features[:n_samples // batch_size], axis=0)
fake_features = np.concatenate(fake_features[:n_samples // batch_size], axis=0)
mu_real = np.mean(real_features, axis=0)
sigma_real = np.cov(real_features, rowvar=False)
mu_fake = np.mean(fake_features, axis=0)
sigma_fake = np.cov(fake_features, rowvar=False)
# Calculate FID
diff = mu_real - mu_fake
covmean = sqrtm(sigma_real.dot(sigma_fake))
if np.iscomplexobj(covmean):
covmean = covmean.real
fid = np.sum(diff**2) + np.trace(sigma_real + sigma_fake - 2 * covmean)
return fid
# Example usage:
# Load the saved models
model_path = "wgan_models-50.pth"
checkpoint = torch.load(model_path)
generator.load_state_dict(checkpoint['generator'])
# Calculate Inception Score
is_score = calculate_inception_score(generator, latent_dim=128, n_samples=1000, device=device)
print(f"Inception Score (IS): {is_score:.4f}")
# Calculate FID
fid_score = calculate_fid(generator, dataloader, latent_dim=128, n_samples=1000, device=device)
print(f"Fréchet Inception Distance (FID): {fid_score:.4f}")
C:\Users\singh\AppData\Local\Temp\ipykernel_20264\4074231032.py:92: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. checkpoint = torch.load(model_path) c:\Config\envs\studies\Lib\site-packages\torchvision\models\_utils.py:208: UserWarning: The parameter 'pretrained' is deprecated since 0.13 and may be removed in the future, please use 'weights' instead. warnings.warn( c:\Config\envs\studies\Lib\site-packages\torchvision\models\_utils.py:223: UserWarning: Arguments other than a weight enum or `None` for 'weights' are deprecated since 0.13 and may be removed in the future. The current behavior is equivalent to passing `weights=Inception_V3_Weights.IMAGENET1K_V1`. You can also use `weights=Inception_V3_Weights.DEFAULT` to get the most up-to-date weights. warnings.warn(msg) Downloading: "https://download.pytorch.org/models/inception_v3_google-0cc3c7bd.pth" to C:\Users\singh/.cache\torch\hub\checkpoints\inception_v3_google-0cc3c7bd.pth 100%|██████████| 104M/104M [00:02<00:00, 48.2MB/s] Calculating IS: 100%|██████████| 15/15 [00:39<00:00, 2.60s/it]
Inception Score (IS): 1.4292
Collecting real features: 100%|██████████| 8/8 [00:35<00:00, 4.47s/it] Generating fake features: 100%|██████████| 19/19 [00:41<00:00, 2.18s/it]
Fréchet Inception Distance (FID): 1973.8786
Run 4_ Tunning (Not As good As Run 2)- Kevin¶
In [ ]:
import torch
import torch.nn as nn
import torch.optim as optim
import torchvision
from torch.utils.data import Dataset, DataLoader
from PIL import Image, UnidentifiedImageError
import os
import matplotlib.pyplot as plt
import time
from torchvision import transforms
from torchmetrics.image.inception import InceptionScore
from scipy.linalg import sqrtm
import numpy as np
# Check if CUDA is available
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Custom Dataset for Yelp Photos
class YelpPhotoDataset(Dataset):
def __init__(self, photos_folder, transform=None, max_samples=None):
self.photos_folder = photos_folder
self.transform = transform
self.image_files = [f for f in os.listdir(photos_folder) if f.endswith('.jpg')] # Only jpg images
if max_samples:
self.image_files = self.image_files[:max_samples] # Limit number of samples
def __getitem__(self, idx):
img_path = os.path.join(self.photos_folder, self.image_files[idx])
try:
image = Image.open(img_path).convert('RGB') # Open image
if self.transform:
image = self.transform(image) # Apply the transformation pipeline
return image
except (UnidentifiedImageError, IOError) as e:
# Catch errors related to invalid or unreadable images
print(f"Error loading image {img_path}: {e}")
return None # Return None if there's an error loading the image
def __len__(self):
return len(self.image_files)
# Custom collate function to filter out None values from batches
def collate_fn(batch):
batch = [item for item in batch if item is not None] # Remove None values
return torch.stack(batch, 0) if len(batch) > 0 else None # Only stack if there's valid data
# Generator Model
class Generator(nn.Module):
def __init__(self, latent_dim=128):
super(Generator, self).__init__()
self.init_size = 64 // 4 # Output size after upscaling
self.fc = nn.Sequential(
nn.Linear(latent_dim, 128 * self.init_size ** 2), # Input size now matches latent_dim
nn.BatchNorm1d(128 * self.init_size ** 2),
nn.LeakyReLU(0.2, inplace=True),
)
self.conv_blocks = nn.Sequential(
nn.Upsample(scale_factor=2), # 16x16 -> 32x32
nn.Conv2d(128, 128, 3, stride=1, padding=1),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.2, inplace=True),
nn.Upsample(scale_factor=2), # 32x32 -> 64x64
nn.Conv2d(128, 64, 3, stride=1, padding=1),
nn.BatchNorm2d(64),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(64, 3, 3, stride=1, padding=1),
nn.Tanh(), # Output values in range [-1, 1]
)
def forward(self, z):
out = self.fc(z)
out = out.view(out.size(0), 128, self.init_size, self.init_size)
img = self.conv_blocks(out)
return img
# Critic Model
class Critic(nn.Module):
def __init__(self, img_channels, img_size):
super(Critic, self).__init__()
conv1_output = (img_size - 3 + 2*1) // 2 + 1
conv2_output = (conv1_output - 3 + 2*1) // 2 + 1
conv3_output = (conv2_output - 3 + 2*1) // 2 + 1
conv4_output = (conv3_output - 3 + 2*1) // 2 + 1
flattened_size = conv4_output * conv4_output * 512 # 512 channels after the last Conv layer
self.model = nn.Sequential(
nn.Conv2d(img_channels, 64, kernel_size=3, stride=2, padding=1),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(64, 128, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(128, 256, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(256, 512, kernel_size=3, stride=2, padding=1),
nn.BatchNorm2d(512),
nn.LeakyReLU(0.2, inplace=True)
)
self.fc = nn.Linear(flattened_size, 1)
def forward(self, img):
x = self.model(img)
x = x.view(x.size(0), -1)
output = self.fc(x)
return output
# Gradient Penalty Calculation for WGAN-GP
def compute_gradient_penalty(critic, real_samples, fake_samples):
alpha = torch.rand(real_samples.size(0), 1, 1, 1, device=real_samples.device)
interpolates = (alpha * real_samples + (1 - alpha) * fake_samples).requires_grad_(True)
d_interpolates = critic(interpolates)
fake = torch.ones(d_interpolates.size(), device=real_samples.device)
gradients = torch.autograd.grad(
outputs=d_interpolates,
inputs=interpolates,
grad_outputs=fake,
create_graph=True,
retain_graph=True,
only_inputs=True
)[0]
gradients = gradients.view(gradients.size(0), -1)
gradient_penalty = ((gradients.norm(2, dim=1) - 1) ** 2).mean()
return gradient_penalty
# Save generated images for visualization
def save_generated_images(epoch, fixed_z, display=False):
generator.eval()
with torch.no_grad():
samples = generator(fixed_z).cpu()
grid = torchvision.utils.make_grid(samples, normalize=True, scale_each=True)
if display:
# Display image
plt.imshow(grid.permute(1, 2, 0))
plt.title(f"Epoch {epoch + 1}")
plt.show()
else:
# Save image
plt.imshow(grid.permute(1, 2, 0))
plt.title(f"Epoch {epoch + 1}")
plt.savefig(f"generated_images_epoch_{epoch + 1}.png")
plt.close()
generator.train()
# Hyperparameters
latent_dim = 128
img_size = 64
img_channels = 3
epochs = 50
batch_size = 32 # Reduced batch size
critic_iters = 5 # Reduced critic iterations
lambda_gp = 5 # Adjusted gradient penalty
lr = 0.000005 # Learning rate for both generator and critic
# Image Transformation with Data Augmentation
transform = transforms.Compose([
transforms.Resize((64, 64)),
transforms.RandomHorizontalFlip(),
transforms.RandomRotation(10),
transforms.ToTensor(),
transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]) # Normalize to [-1, 1]
])
# Initialize models
generator = Generator(latent_dim=latent_dim).to(device)
critic = Critic(img_channels=img_channels, img_size=img_size).to(device)
# Optimizers
optimizer_G = optim.Adam(generator.parameters(), lr=lr, betas=(0, 0.9))
optimizer_C = optim.Adam(critic.parameters(), lr=lr, betas=(0, 0.9))
# Dataset and DataLoader
photos_folder = 'C:/Users/singh/Downloads/yelp_dataset/photos' # Update with correct path
dataset = YelpPhotoDataset(photos_folder, transform=transform, max_samples=800)
dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=0, collate_fn=collate_fn)
# Fixed noise for consistent image generation
fixed_z = torch.randn(64, latent_dim, device=device)
# Loss tracking
gen_losses = []
critic_losses = []
# Initialize Inception Score (IS) metric without device and split arguments
inception_score_metric = InceptionScore(feature='logits_unbiased', normalize=True, splits=10)
# Training loop
for epoch in range(epochs):
epoch_gen_loss = 0
epoch_critic_loss = 0
for i, imgs in enumerate(dataloader):
if imgs is None:
continue
batch_size = imgs.size(0)
imgs = imgs.to(device)
# Train Critic
for _ in range(critic_iters):
optimizer_C.zero_grad()
# Real and fake images
real_imgs = imgs
z = torch.randn(batch_size, latent_dim, device=device)
fake_imgs = generator(z)
# Compute critic loss and gradient penalty
real_validity = critic(real_imgs)
fake_validity = critic(fake_imgs)
gradient_penalty = compute_gradient_penalty(critic, real_imgs, fake_imgs)
critic_loss = fake_validity.mean() - real_validity.mean() + lambda_gp * gradient_penalty
critic_loss.backward()
optimizer_C.step()
epoch_critic_loss += critic_loss.item()
# Train Generator
optimizer_G.zero_grad()
fake_imgs = generator(z)
gen_loss = -critic(fake_imgs).mean()
gen_loss.backward()
optimizer_G.step()
epoch_gen_loss += gen_loss.item()
# Print losses and save generated images
print(f"Epoch [{epoch + 1}/{epochs}], Gen Loss: {epoch_gen_loss / len(dataloader):.4f}, Critic Loss: {epoch_critic_loss / len(dataloader):.4f}")
gen_losses.append(epoch_gen_loss / len(dataloader))
critic_losses.append(epoch_critic_loss / len(dataloader))
if (epoch + 1) % 10 == 0: # Save generated images every 10 epochs
save_generated_images(epoch, fixed_z, display=True)
# Compute Inception Score (IS) after each epoch
generated_images = generator(fixed_z).cpu() # Ensure images are moved to CPU for IS computation
inception_score, _ = inception_score_metric(generated_images)
print(f"Inception Score (IS): {inception_score.mean().item():.4f}")
# Save final generated images
save_generated_images(epochs - 1, fixed_z, display=False)
# Optionally, plot the loss curves
plt.plot(gen_losses, label="Generator Loss")
plt.plot(critic_losses, label="Critic Loss")
plt.legend()
plt.show()
Epoch [1/50], Gen Loss: 5.9494, Critic Loss: -31.1289 Inception Score (IS): 1.2216 Epoch [2/50], Gen Loss: 12.9663, Critic Loss: -57.8951 Inception Score (IS): 1.4389 Epoch [3/50], Gen Loss: 18.9601, Critic Loss: -65.5024 Inception Score (IS): 1.5236 Epoch [4/50], Gen Loss: 23.4282, Critic Loss: -67.8468 Inception Score (IS): 1.5484 Epoch [5/50], Gen Loss: 25.9279, Critic Loss: -68.0163 Inception Score (IS): 1.5302 Epoch [6/50], Gen Loss: 27.6564, Critic Loss: -67.7891 Inception Score (IS): 1.4629 Epoch [7/50], Gen Loss: 30.7475, Critic Loss: -73.3178 Inception Score (IS): 1.5677 Epoch [8/50], Gen Loss: 33.7682, Critic Loss: -77.9924 Inception Score (IS): 1.5944 Epoch [9/50], Gen Loss: 35.9436, Critic Loss: -83.7564 Inception Score (IS): 1.5451 Epoch [10/50], Gen Loss: 40.0999, Critic Loss: -135.7329
Inception Score (IS): 1.5030 Epoch [11/50], Gen Loss: 47.4654, Critic Loss: -88.9374 Inception Score (IS): 1.4210 Epoch [12/50], Gen Loss: 47.8825, Critic Loss: -104.5066 Inception Score (IS): 1.5342 Epoch [13/50], Gen Loss: 45.3538, Critic Loss: -211.8452 Inception Score (IS): 1.5846 Epoch [14/50], Gen Loss: 58.5691, Critic Loss: -147.8329 Inception Score (IS): 1.3619 Epoch [15/50], Gen Loss: 66.6958, Critic Loss: -121.1654 Inception Score (IS): 1.3893 Epoch [16/50], Gen Loss: 54.1524, Critic Loss: -253.2270 Inception Score (IS): 1.5999 Epoch [17/50], Gen Loss: 61.5096, Critic Loss: -233.0470 Inception Score (IS): 1.4946 Epoch [18/50], Gen Loss: 90.1001, Critic Loss: -93.8621 Inception Score (IS): 1.3899 Epoch [19/50], Gen Loss: 88.3304, Critic Loss: -164.4743 Inception Score (IS): 1.4446 Epoch [20/50], Gen Loss: 64.2697, Critic Loss: -247.3827
Inception Score (IS): 1.6336 Epoch [21/50], Gen Loss: 73.3023, Critic Loss: -443.1093 Inception Score (IS): 1.5813 Epoch [22/50], Gen Loss: 82.4426, Critic Loss: -70.8524 Inception Score (IS): 1.4700 Epoch [23/50], Gen Loss: 119.0387, Critic Loss: -194.6549 Inception Score (IS): 1.3512 Epoch [24/50], Gen Loss: 82.9217, Critic Loss: -310.2445 Inception Score (IS): 1.6677 Epoch [25/50], Gen Loss: 82.4137, Critic Loss: -531.0269 Inception Score (IS): 1.6429 Epoch [26/50], Gen Loss: 106.8668, Critic Loss: -218.5645 Inception Score (IS): 1.5263 Epoch [27/50], Gen Loss: 128.6771, Critic Loss: -170.2807 Inception Score (IS): 1.4307 Epoch [28/50], Gen Loss: 131.3460, Critic Loss: -246.9075 Inception Score (IS): 1.4275 Epoch [29/50], Gen Loss: 131.6768, Critic Loss: -279.5075 Inception Score (IS): 1.4446 Epoch [30/50], Gen Loss: 139.7991, Critic Loss: -301.6005
Inception Score (IS): 1.5047 Epoch [31/50], Gen Loss: 72.3712, Critic Loss: -357.7679 Inception Score (IS): 1.4236 Epoch [32/50], Gen Loss: 101.2127, Critic Loss: -421.3685 Inception Score (IS): 1.6563 Epoch [33/50], Gen Loss: 94.3559, Critic Loss: -718.5463 Inception Score (IS): 1.7045 Epoch [34/50], Gen Loss: 121.3799, Critic Loss: -710.0978 Inception Score (IS): 1.5642 Epoch [35/50], Gen Loss: 161.4181, Critic Loss: -248.4785 Inception Score (IS): 1.3266 Epoch [36/50], Gen Loss: 189.5891, Critic Loss: -300.0434 Inception Score (IS): 1.3658 Epoch [37/50], Gen Loss: 135.2936, Critic Loss: -545.2798 Inception Score (IS): 1.4735 Epoch [38/50], Gen Loss: 123.8905, Critic Loss: -922.7663 Inception Score (IS): 1.5369 Epoch [39/50], Gen Loss: 146.5873, Critic Loss: -918.9191 Inception Score (IS): 1.3342 Epoch [40/50], Gen Loss: 206.1419, Critic Loss: -161.3649
Inception Score (IS): 1.2493 Epoch [41/50], Gen Loss: 212.9338, Critic Loss: -338.5400 Inception Score (IS): 1.3709 Epoch [42/50], Gen Loss: 213.5463, Critic Loss: -257.4294 Inception Score (IS): 1.3557 Epoch [43/50], Gen Loss: 147.3584, Critic Loss: -453.7014 Inception Score (IS): 1.7200 Epoch [44/50], Gen Loss: 132.6471, Critic Loss: -961.8471 Inception Score (IS): 1.8039 Epoch [45/50], Gen Loss: 167.5856, Critic Loss: -1217.4062 Inception Score (IS): 1.6867 Epoch [46/50], Gen Loss: 200.7677, Critic Loss: -745.4336 Inception Score (IS): 1.3426 Epoch [47/50], Gen Loss: 263.4852, Critic Loss: -314.1031 Inception Score (IS): 1.3491 Epoch [48/50], Gen Loss: 220.4469, Critic Loss: -418.8322 Inception Score (IS): 1.3633 Epoch [49/50], Gen Loss: 187.3271, Critic Loss: -251.9864 Inception Score (IS): 1.3032 Epoch [50/50], Gen Loss: 241.4441, Critic Loss: -479.6991
Inception Score (IS): 1.3757
Loading Best Model & Generating Images With It¶
In [7]:
import torch
from torchvision.utils import save_image
import os
import matplotlib.pyplot as plt
# Load the trained Generator
model_path = "wgan_models-50.pth" # Path to the saved model
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Initialize Generator
latent_dim = 128 # Ensure this matches your training setup
generator = Generator(latent_dim=latent_dim).to(device)
# Load trained weights
checkpoint = torch.load(model_path, map_location=device)
generator.load_state_dict(checkpoint['generator'])
# Set generator to evaluation mode
generator.eval()
# Function to generate and save images
def generate_images(generator, num_images=64, save_path="generated_images"):
os.makedirs(save_path, exist_ok=True) # Ensure the directory exists
with torch.no_grad():
# Generate random noise
z = torch.randn(num_images, latent_dim, device=device)
# Generate images
fake_images = generator(z)
# Rescale images to [0, 1] from [-1, 1]
fake_images = (fake_images + 1) / 2
# Save images individually
for i, img in enumerate(fake_images):
save_image(img, os.path.join(save_path, f"generated_{i + 1}.png"))
print(f"Generated {num_images} images saved to {save_path}")
# Generate and save images
generate_images(generator, num_images=64)
# Display a grid of the generated images
def display_images(save_path, grid_size=(8, 8)):
# Load generated images
image_paths = [os.path.join(save_path, f) for f in os.listdir(save_path) if f.endswith(".png")]
image_paths = sorted(image_paths) # Ensure the images are ordered
# Create a grid
fig, axes = plt.subplots(*grid_size, figsize=(12, 12))
for ax, img_path in zip(axes.flatten(), image_paths):
img = plt.imread(img_path)
ax.imshow(img)
ax.axis('off')
plt.tight_layout()
plt.show()
# Display the generated images in a grid
display_images(save_path="generated_images", grid_size=(8, 8))
C:\Users\singh\AppData\Local\Temp\ipykernel_20264\863512167.py:15: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. checkpoint = torch.load(model_path, map_location=device)
Generated 64 images saved to generated_images